diff options
50 files changed, 2330 insertions, 148 deletions
diff --git a/scripts/jobs/scala-release-2.11.x-build b/scripts/jobs/scala-release-2.11.x-build new file mode 100755 index 0000000000..21fbb8fa76 --- /dev/null +++ b/scripts/jobs/scala-release-2.11.x-build @@ -0,0 +1,626 @@ +#!/bin/bash -e +# requirements: +# sbtCmd must point to sbt from sbt-extras (this is the standard on the Scala jenkins, so we only support that one) +# - ~/.sonatype-curl that consists of user = USER:PASS +# - ~/.m2/settings.xml with credentials for sonatype + # <server> + # <id>private-repo</id> + # <username>jenkinside</username> + # <password></password> + # </server> +# - ~/.credentials (for sonatype) + # realm=Sonatype Nexus Repository Manager + # host=oss.sonatype.org + # user=lamp + # password= +# - ~/.credentials-private-repo for private-repo.typesafe.com, as follows: + # realm=Artifactory Realm + # host=private-repo.typesafe.com + # user=jenkinside + # password= +# - ~/.sbt/0.13/plugins/gpg.sbt with: +# addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.1") + +# Modus operandi: +# +# Determine Scala version as: +# +# $SCALA_VER_BASE$SCALA_VER_SUFFIX (if former variable is set) +# By parsing the tag (if HEAD is tagged as v$base$suffix) +# By parsing build.number for the base version, suffixing with -$sha-nightly +# Serialize these versions to jenkins.properties, which are passed downstream to scala-release-2.11.x-dist. +# This also removes the need to tag scala/scala-dist (not possible for nightlies, still encouraged for releases, but not a hard requirement). +# +# Determine Module Versions +# +# When running in "versions.properties" mode (the default), derive tags from these versions and build, publishing only those modules that are not available yet. +# Otherwise, build HEAD for all modules, derive a -nightly version for them. 
+# Bootstrap: +# +# Build minimal core of Scala as this version (aka locker), publish to private-repo +# Build modules required to bootstrap, publish to private-repo +# Build Scala using the previously built core and bootstrap modules, publish to private-repo This overwrites the minimal core on private-repo +# Stage to sonatype (unless building a -nightly release): +# +# Stage this Scala build on sonatype +# Rebuild modules with this Scala build, and stage them on sonatype as well +# This script can be run in multiple modes. It is designed to work without any input, +# so that it could be run in Travis CI. In that mode, it'll build a release when +# the current HEAD of the checkout in $WORKSPACE is tagged, and stage to sonatype. Otherwise, +# it'll build a nightly. +# +# Since the nightlies are intended to be a drop in replacement, all modules are built with the +# full Scala version as their binary version, so that you can just set scalaVersion to the +# nightly's sha-derived version and be good to go. +# +# The other way to trigger a release is by setting the SCALA_VER_BASE env var. +# +# By default, we build the versions of the modules as specified by versions.properties +# (as specified in the HEAD commit). Set moduleVersioning to something random +# to trigger building HEAD of each module, generating a fresh -$(git describe)-nightly version for each. +# +# PS: set publishToSonatype to anything but "yes" to avoid publishing to sonatype +# (publishing only done when $WORKSPACE checkout's HEAD is tagged / SCALA_VER_BASE is set.) 
+ + +# set to something besides the default to build nightly snapshots of the modules instead of some tagged version +moduleVersioning=${moduleVersioning-"versions.properties"} + +baseDir=${WORKSPACE-`pwd`} +publishPrivateTask=${publishPrivateTask-"publish"} +publishSonatypeTaskCore=${publishSonatypeTaskCore-"publish-signed"} +publishSonatypeTaskModules=${publishSonatypeTaskModules-"publish-signed"} +publishLockerPrivateTask=${publishLockerPrivateTask-$publishPrivateTask} # set to "init" to speed up testing of the script (if you already built locker before) + +sbtCmd=${sbtCmd-sbt} # TESTING (this is a marker for defaults to change when testing locally: should be sbtx on my mac) + +# 0.13.5 does not respect "set every scalaVersion", see +# https://github.com/scala/scala-parser-combinators/pull/27 +sbtCmd="$sbtCmd -sbt-version 0.13.2" + +forceRebuild=${forceRebuild-no} + +# publishToSonatype +# set to anything but "yes" to avoid publishing to sonatype +# overridden to "no" when no SCALA_VER_BASE is passed and HEAD is not tagged with a valid version tag +# + +antBuildTask="${antBuildTask-nightly}" # TESTING leave empty to avoid the sanity check (don't set it to "init" because ant will croak) +clean="clean" # TESTING leave empty to speed up testing + +scriptsDir="$WORKSPACE/scripts" + +# This is for forcibly stopping the job from a subshell (see test +# below). +trap "exit 1" TERM +export TOP_PID=$$ +set -e + +# Known problems : does not fare well with interrupted, partial +# compilations. 
We should perhaps have a multi-dependency version +# of do_i_have below + +LOGGINGDIR="$WORKSPACE/logs" +mkdir -p $LOGGINGDIR + +unset SBT_HOME +SBT_HOME="$WORKSPACE/.sbt" +mkdir -p $SBT_HOME +IVY_CACHE="$WORKSPACE/.ivy2" +mkdir -p $IVY_CACHE +rm -rf $IVY_CACHE/cache/org.scala-lang + +# temp dir where all 'non-build' operation are performed +TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) +TMP_DIR="${TMP_ROOT_DIR}/tmp" +mkdir "${TMP_DIR}" + + +# detect sed version and how to enable extended regexes +SEDARGS="-n$(if (echo "a" | sed -nE "s/a/b/" &> /dev/null); then echo E; else echo r; fi)" + + + +# :docstring test: +# Usage: test <argument ..> +# Executes <argument ..>, logging the launch of the command to the +# main log file, and kills global script execution with the TERM +# signal if the commands ends up failing. +# DO NOT USE ON FUNCTIONS THAT DECLARE VARIABLES, +# AS YOU'LL BE RUNNING IN A SUBSHELL AND VARIABLE DECLARATIONS WILL BE LOST +# :end docstring: + +function test() { + echo "### $@" + "$@" + status=$? + if [ $status -ne 0 ]; then + say "### ERROR with $1" + kill -s TERM $TOP_PID + fi +} + +# :docstring say: +# Usage: say <argument ..> +# Prints <argument ..> to both console and the main log file. +# :end docstring: + +function say(){ + (echo "$@") | tee -a $LOGGINGDIR/compilation-$SCALADATE-$SCALAHASH.log +} + + +# we must change ivy home to get a fresh ivy cache, otherwise we get half-bootstrapped scala +# rm it in case it existed (and there's no ivy2-shadow, which indicates we're running in a TESTING environment)... 
+# we don't nuke the whole ws since that clobbers the git clones needlessly +[[ -d $baseDir/ivy2-shadow ]] || rm -rf $baseDir/ivy2 +mkdir -p $baseDir/ivy2 + +rm -rf $baseDir/resolutionScratch_ +mkdir -p $baseDir/resolutionScratch_ + +# repo used to publish "locker" scala to (to start the bootstrap) +privateCred="private-repo" +privateRepo="http://private-repo.typesafe.com/typesafe/scala-release-temp/" + +function parseScalaProperties(){ + propFile="$baseDir/$1" + if [ ! -f $propFile ]; then + echo "Property file $propFile not found." + exit 1 + else + awk -f "$scriptsDir/readproperties.awk" "$propFile" > "$propFile.sh" + . "$propFile.sh" # yeah yeah, not that secure, improvements welcome (I tried, but bash made me cry again) + fi +} + +##### git +gfxd() { + git clean -fxd # TESTING +} + +update() { + [[ -d $baseDir ]] || mkdir -p $baseDir + cd $baseDir + + if [ ! -d $baseDir/$2 ]; then git clone "https://github.com/$1/$2.git"; fi + + cd $2 + + git fetch --tags "https://github.com/$1/$2.git" + (git fetch "https://github.com/$1/$2.git" $3 && git checkout -q FETCH_HEAD) #|| git checkout -q $3 # || fallback is for local testing on tag + git reset --hard +} + +##### sonatype interface + +stApi="https://oss.sonatype.org/service/local" + +function st_curl(){ + curl -H "Content-Type: application/json" -H "accept: application/json,application/vnd.siesta-error-v1+json,application/vnd.siesta-validation-errors-v1+json" -K ~/.sonatype-curl -s -o - $@ +} + +function st_stagingReposOpen() { + st_curl "$stApi/staging/profile_repositories" | jq '.data[] | select(.profileName == "org.scala-lang") | select(.type == "open")' +} + +function st_stagingRepoDrop() { + repo=$1 + message=$2 + echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST -d @- "$stApi/staging/bulk/drop" +} + +function st_stagingRepoClose() { + repo=$1 + message=$2 + echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST 
-d @- "$stApi/staging/bulk/close" +} + + +# ARGH trying to get this to work on multiple versions of sbt-extras... +# the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir +# the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base +# need to set sbt-dir to one that has the gpg.sbt plugin config +sbtArgs="-no-colors -ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$scriptsDir/repositories-scala-release -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13" + +sbtBuild() { + echo "### sbtBuild: "$sbtCmd $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" + $sbtCmd $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" >> $baseDir/logs/builds 2>&1 +} + +sbtResolve() { + cd $baseDir/resolutionScratch_ + touch build.sbt + cross=${4-binary} # Disabled / binary / full + echo "### sbtResolve: $sbtCmd $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" + $sbtCmd $sbtArgs "${scalaVersionTasks[@]}" \ + "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ + 'show update' >> $baseDir/logs/resolution 2>&1 +} + +# Oh boy... can't use scaladoc to document scala-xml/scala-parser-combinators +# if scaladoc depends on the same version of scala-xml/scala-parser-combinators. +# Even if that version is available through the project's resolvers, sbt won't look past this project. +# SOOOOO, we set the version to a dummy (-DOC), generate documentation, +# then set the version to the right one and publish (which won't re-gen the docs). +# Also tried publish-local without docs using 'set publishArtifact in (Compile, packageDoc) := false' and republishing, no dice. + +# Each buildModule() function is invoked twice: first to build against locker and publish to private-repo, then +# to build against the release and publish to sonatype (or publish-local if publishToSonatype is not "yes"). 
+# In the second round, sbtResolve is always true: the module will be found in the private-repo! +# Therefore, if MODULE_BUILT is "yes" (in the second round), we know that we need to build (and publish) the +# module again. +# +# Note: we tried an alternative solution in which sbtResolve would not look at private-repo, but that fails. For example, +# scala-xml depends on scala-library, so sbt tries to find the scala-library of the version that we are currently building, +# which exists only in private-repo. + +buildXML() { + if [ "$XML_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER ) + then echo "Found scala-xml $XML_VER; not building." + else + update scala scala-xml "$XML_REF" && gfxd + sbtBuild 'set version := "'$XML_VER'-DOC"' $clean doc 'set version := "'$XML_VER'"' test "${buildTasks[@]}" + XML_BUILT="yes" # ensure the module is built and published when buildXML is invoked for the second time, see comment above + fi +} + +buildParsers() { + if [ "$PARSERS_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-parser-combinators" $PARSERS_VER ) + then echo "Found scala-parser-combinators $PARSERS_VER; not building." + else + update scala scala-parser-combinators "$PARSERS_REF" && gfxd + sbtBuild 'set version := "'$PARSERS_VER'-DOC"' $clean doc 'set version := "'$PARSERS_VER'"' test "${buildTasks[@]}" + PARSERS_BUILT="yes" + fi +} + +buildPartest() { + if [ "$PARTEST_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-partest" $PARTEST_VER ) + then echo "Found scala-partest $PARTEST_VER; not building." 
+ else + update scala scala-partest "$PARTEST_REF" && gfxd + sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' 'set VersionKeys.scalaCheckVersion := "'$SCALACHECK_VER'"' $clean test "${buildTasks[@]}" + PARTEST_BUILT="yes" + fi +} + +# buildPartestIface() { +# if [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-partest-interface" $PARTEST_IFACE_VER ) +# then echo "Found scala-partest-interface $PARTEST_IFACE_VER; not building." +# else +# update scala scala-partest-interface "$PARTEST_IFACE_REF" && gfxd +# sbtBuild 'set version :="'$PARTEST_IFACE_VER'"' $clean "${buildTasks[@]}" +# fi +# } + +buildContinuations() { + if [ "$CONT_PLUG_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.plugins" "scala-continuations-plugin" $CONTINUATIONS_VER full ) + then echo "Found scala-continuations-plugin $CONTINUATIONS_VER; not building." + else + update scala scala-continuations $CONTINUATIONS_REF && gfxd + + $sbtCmd $sbtArgs 'project plugin' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \ + 'set version := "'$CONTINUATIONS_VER'"' $clean "compile:package" test "${buildTasks[@]}" # https://github.com/scala/scala-continuations/pull/4 + CONT_PLUG_BUILT="yes" + fi + + if [ "$CONT_LIB_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.plugins" "scala-continuations-library" $CONTINUATIONS_VER ) + then echo "Found scala-continuations-library $CONTINUATIONS_VER; not building." + else + update scala scala-continuations $CONTINUATIONS_REF && gfxd + $sbtCmd $sbtArgs 'project library' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \ + 'set version := "'$CONTINUATIONS_VER'"' $clean test "${buildTasks[@]}" + CONT_LIB_BUILT="yes" + fi +} + +buildSwing() { + if [ "$SWING_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-swing" $SWING_VER ) + then echo "Found scala-swing $SWING_VER; not building." 
+ else + update scala scala-swing "$SWING_REF" && gfxd + sbtBuild 'set version := "'$SWING_VER'"' $clean test "${buildTasks[@]}" + SWING_BUILT="yes" + fi +} + +buildActorsMigration(){ + if [ "$ACTORS_MIGRATION_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang" "scala-actors-migration" $ACTORS_MIGRATION_VER ) + then echo "Found scala-actors-migration $ACTORS_MIGRATION_VER; not building." + else + update scala actors-migration "$ACTORS_MIGRATION_REF" && gfxd + # not running tests because + # [error] Test scala.actors.migration.NestedReact.testNestedReactAkka failed: java.util.concurrent.TimeoutException: Futures timed out after [20 seconds] + sbtBuild 'set version := "'$ACTORS_MIGRATION_VER'"' 'set VersionKeys.continuationsVersion := "'$CONTINUATIONS_VER'"' $clean "${buildTasks[@]}" + ACTORS_MIGRATION_BUILT="yes" + fi +} + +buildScalacheck(){ + if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER ) + then echo "Found scalacheck $SCALACHECK_VER; not building." + else + update rickynils scalacheck $SCALACHECK_REF && gfxd + sbtBuild 'set version := "'$SCALACHECK_VER'"' 'set VersionKeys.scalaParserCombinatorsVersion := "'$PARSERS_VER'"' $clean $publishPrivateTask # test times out NOTE: never published to sonatype + SCALACHECK_BUILT="yes" + fi +} + +# build modules, using ${buildTasks[@]} (except for Scalacheck, which is hard-coded to publish to private-repo) +buildModules() { + buildXML + buildParsers + buildContinuations + buildSwing + buildActorsMigration + buildScalacheck + buildPartest + # buildPartestIface +} + + +## BUILD STEPS: + +determineScalaVersion() { + cd $WORKSPACE + parseScalaProperties "versions.properties" + + if [ -z "$SCALA_VER_BASE" ]; then + echo "No SCALA_VER_BASE specified." 
+ + scalaTag=$(git describe --exact-match ||:) + + SCALA_BINARY_VER=${SCALA_BINARY_VER-"$scala_binary_version"} + + if [ -z "$scalaTag" ] + then + echo "No tag found, building nightly snapshot." + parseScalaProperties "build.number" + SCALA_VER_BASE="$version_major.$version_minor.$version_patch" + SCALA_VER_SUFFIX="-$(git rev-parse --short HEAD)-nightly" + SCALADOC_SOURCE_LINKS_VER=$(git rev-parse HEAD) + + # TODO: publish nightly snapshot using this script + publishToSonatype="no" + echo "repo_ref=2.11.x" >> $baseDir/jenkins.properties # for the -dist downstream jobs that build the actual archives + else + echo "HEAD is tagged as $scalaTag." + # borrowed from https://github.com/cloudflare/semver_bash/blob/master/semver.sh + local RE='v*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)' # don't change this to make it more accurate, it's not worth it + SCALA_VER_BASE="$(echo $scalaTag | sed -e "s#$RE#\1.\2.\3#")" + SCALA_VER_SUFFIX="$(echo $scalaTag | sed -e "s#$RE#\4#")" + SCALADOC_SOURCE_LINKS_VER=$scalaTag + + if [ "$SCALA_VER_BASE" == "$scalaTag" ]; then + echo "Could not parse version $scalaTag" + exit 1 + fi + publishToSonatype=${publishToSonatype-"yes"} # unless forced previously, publish + fi + else + publishToSonatype=${publishToSonatype-"yes"} # unless forced previously, publish + # if version base/suffix are provided, we assume a corresponding tag exists for the scaladoc source links + SCALADOC_SOURCE_LINKS_VER="v$SCALA_VER_BASE$SCALA_VER_SUFFIX" + fi + + SCALA_VER="$SCALA_VER_BASE$SCALA_VER_SUFFIX" + echo "version=$SCALA_VER" >> $baseDir/jenkins.properties + echo "sbtDistVersionOverride=-Dproject.version=$SCALA_VER" >> $baseDir/jenkins.properties + + # We don't override the scala binary version: when running in -nightly + versions.properties versioning mode, + # we intend to be a drop-in replacement -- all you need to do is change the Scala version + # In order to override this, add 'set every scalaBinaryVersion := "'$SCALA_BINARY_VER'"', + # which, 
when used with pre-release Scala version numbers, will require tweaking at the sbt usage site as well. + scalaVersionTasks=('set every scalaVersion := "'$SCALA_VER'"') + + echo "Building Scala $SCALA_VER." +} + +deriveVersion() { + update $1 $2 $3 &> /dev/null + echo "$(git describe --match=v* | cut -dv -f2)-nightly" +} + +deriveVersionAnyTag() { + update $1 $2 $3 &> /dev/null + echo "$(git describe | cut -dv -f2)-nightly" +} + +# determineScalaVersion must have been called +deriveModuleVersions() { + if [ "$moduleVersioning" == "versions.properties" ] + then + # use versions.properties as defaults when no version specified on command line + XML_VER=${XML_VER-$scala_xml_version_number} + PARSERS_VER=${PARSERS_VER-$scala_parser_combinators_version_number} + CONTINUATIONS_VER=${CONTINUATIONS_VER-$scala_continuations_plugin_version_number} + SWING_VER=${SWING_VER-$scala_swing_version_number} + ACTORS_MIGRATION_VER=${ACTORS_MIGRATION_VER-$actors_migration_version_number} + PARTEST_VER=${PARTEST_VER-$partest_version_number} + SCALACHECK_VER=${SCALACHECK_VER-$scalacheck_version_number} + + # If a _VER was not specified, the corresponding _REF will be non-empty by now (as specified, or HEAD) + XML_REF=${XML_REF-"v$XML_VER"} + PARSERS_REF=${PARSERS_REF-"v$PARSERS_VER"} + CONTINUATIONS_REF=${CONTINUATIONS_REF-"v$CONTINUATIONS_VER"} + SWING_REF=${SWING_REF-"v$SWING_VER"} + ACTORS_MIGRATION_REF=${ACTORS_MIGRATION_REF-"v$ACTORS_MIGRATION_VER"} + PARTEST_REF=${PARTEST_REF-"v$PARTEST_VER"} + # PARTEST_IFACE_REF=${PARTEST_IFACE_REF-"v$PARTEST_IFACE_VER"} + SCALACHECK_REF=${SCALACHECK_REF-"$SCALACHECK_VER"} + else + XML_VER=${XML_VER-$(deriveVersion scala scala-xml "$XML_REF")} + PARSERS_VER=${PARSERS_VER-$(deriveVersion scala scala-parser-combinators "$PARSERS_REF")} + CONTINUATIONS_VER=${CONTINUATIONS_VER-$(deriveVersion scala scala-continuations "$CONTINUATIONS_REF")} + SWING_VER=${SWING_VER-$(deriveVersion scala scala-swing "$SWING_REF")} + 
ACTORS_MIGRATION_VER=${ACTORS_MIGRATION_VER-$(deriveVersion scala actors-migration "$ACTORS_MIGRATION_REF")} + PARTEST_VER=${PARTEST_VER-$(deriveVersion scala scala-partest "$PARTEST_REF")} + SCALACHECK_VER=${SCALACHECK_VER-$(deriveVersionAnyTag rickynils scalacheck "$SCALACHECK_REF")} + + XML_REF=${XML_REF-"HEAD"} + PARSERS_REF=${PARSERS_REF-"HEAD"} + CONTINUATIONS_REF=${CONTINUATIONS_REF-"HEAD"} + SWING_REF=${SWING_REF-"HEAD"} + ACTORS_MIGRATION_REF=${ACTORS_MIGRATION_REF-"HEAD"} + PARTEST_REF=${PARTEST_REF-"HEAD"} + # PARTEST_IFACE_REF=${PARTEST_IFACE_REF-"HEAD"} + SCALACHECK_REF=${SCALACHECK_REF-"HEAD"} + fi + + echo "Module versions (versioning strategy: $moduleVersioning):" + echo "ACTORS_MIGRATION = $ACTORS_MIGRATION_VER at $ACTORS_MIGRATION_REF" + echo "CONTINUATIONS = $CONTINUATIONS_VER at $CONTINUATIONS_REF" + echo "PARSERS = $PARSERS_VER at $PARSERS_REF" + echo "PARTEST = $PARTEST_VER at $PARTEST_REF" + echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF" + echo "SWING = $SWING_VER at $SWING_REF" + echo "XML = $XML_VER at $XML_REF" + + # PARTEST_IFACE_VER=${PARTEST_IFACE_VER-$(deriveVersion scala scala-partest-interface "$PARTEST_IFACE_REF")} +} + +constructUpdatedModuleVersions() { + updatedModuleVersions=() + + # force the new module versions for building the core. these may be different from the values in versions.properties, + # either because the variables (XML_VER) were provided, or because we're building the modules from HEAD. + # in the common case, the values are the same as in versions.properties. 
+ updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dactors-migration.version.number=$ACTORS_MIGRATION_VER") + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-continuations-library.version.number=$CONTINUATIONS_VER") + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-continuations-plugin.version.number=$CONTINUATIONS_VER") + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-parser-combinators.version.number=$PARSERS_VER") + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-swing.version.number=$SWING_VER") + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-xml.version.number=$XML_VER") + + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dpartest.version.number=$PARTEST_VER") + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER") + + # allow overriding the akka-actors and jline version using a jenkins build parameter + if [ ! -z "$AKKA_ACTOR_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dakka-actor.version.number=$AKKA_ACTOR_VER"); fi + if [ ! -z "$JLINE_VER" ] ; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Djline.version=$JLINE_VER"); fi + + if [ ! -z "$SCALA_BINARY_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$SCALA_BINARY_VER"); fi + if [ ! 
-z "$SCALA_FULL_VER" ] ; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.full.version=$SCALA_FULL_VER"); fi +} + +# build locker (scala + modules) and quick, publishing everything to private-repo +bootstrap() { + echo "### Bootstrapping" + + cd $WORKSPACE + + #### LOCKER + + echo "### Building locker" + + # for bootstrapping, publish core (or at least smallest subset we can get away with) + # so that we can build modules with this version of Scala and publish them locally + # must publish under $SCALA_VER so that the modules will depend on this (binary) version of Scala + # publish more than just core: partest needs scalap + # in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler + ant -Dmaven.version.number=$SCALA_VER\ + -Dremote.snapshot.repository=NOPE\ + -Dremote.release.repository=$privateRepo\ + -Drepository.credentials.id=$privateCred\ + -Dscalac.args.optimise=-optimise\ + -Ddocs.skip=1\ + -Dlocker.skip=1\ + $publishLockerPrivateTask >> $baseDir/logs/builds 2>&1 + + + echo "### Building modules using locker" + + # build, test and publish modules with this core + # publish to our internal repo (so we can resolve the modules in the scala build below) + # we only need to build the modules necessary to build Scala itself + # since the version of locker and quick are the same + publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"private-repo\" at \"$privateRepo\")") + buildTasks=($publishPrivateTask) + buildModules + + constructUpdatedModuleVersions + + #### QUICK + + echo "### Bootstrapping Scala using locker" + + # # TODO: close all open staging repos so that we can be reasonably sure the only open one we see after publishing below is ours + # # the ant call will create a new one + # + # Rebuild Scala with these modules so that all binary versions are consistent. + # Update versions.properties to new modules. 
+ # Sanity check: make sure the Scala test suite passes / docs can be generated with these modules. + # don't skip locker (-Dlocker.skip=1), or stability will fail + # overwrite "locker" version of scala at private-repo with bootstrapped version + cd $baseDir + rm -rf build/ # must leave everything else in $baseDir for downstream jobs + + ant -Dstarr.version=$SCALA_VER\ + -Dextra.repo.url=$privateRepo\ + -Dmaven.version.suffix=$SCALA_VER_SUFFIX\ + ${updatedModuleVersions[@]} \ + -Dupdate.versions=1\ + -Dscaladoc.git.commit=$SCALADOC_SOURCE_LINKS_VER\ + -Dremote.snapshot.repository=NOPE\ + -Dremote.release.repository=$privateRepo\ + -Drepository.credentials.id=$privateCred\ + -Dscalac.args.optimise=-optimise\ + $antBuildTask $publishPrivateTask + + # clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala + rm -rf $baseDir/ivy2 + + # TODO: create PR with following commit (note that release will have been tagged already) + # git commit versions.properties -m"Bump versions.properties for $SCALA_VER." 
+} + +# assumes we just bootstrapped, and current directory is $baseDir +# publishes locker to sonatype, then builds modules again (those for which version numbers were provided), +# and publishes those to sonatype as well +# finally, the staging repos are closed +publishSonatype() { + # stage to sonatype, along with all modules -Dmaven.version.suffix/-Dbuild.release not necessary, + # since we're just publishing an existing build + echo "### Publishing core to sonatype" + ant -Dmaven.version.number=$SCALA_VER $publishSonatypeTaskCore + + echo "### Publishing modules to sonatype" + # build/test/publish scala core modules to sonatype (this will start a new staging repo) + # (was hoping we could make everything go to the same staging repo, but it's not timing that causes two staging repos to be opened) + # NOTE: only publish those for which versions are set + # test and publish to sonatype, assuming you have ~/.sbt/0.13/sonatype.sbt and ~/.sbt/0.13/plugin/gpg.sbt + publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-sonatype")' "set pgpPassphrase := Some(Array.empty)") + buildTasks=($publishSonatypeTaskModules) + buildModules + + open=$(st_stagingReposOpen) + allOpenUrls=$(echo $open | jq '.repositoryURI' | tr -d \") + allOpen=$(echo $open | jq '.repositoryId' | tr -d \") + + echo "Closing open repos: $allOpen" + + for repo in $allOpen; do st_stagingRepoClose $repo; done + + echo "Closed sonatype staging repos: $allOpenUrls." +} + + +#### MAIN + +determineScalaVersion + +deriveModuleVersions + +bootstrap + +if [ "$publishToSonatype" == "yes" ] + then publishSonatype + else # build modules one more time, just to mimic the regular build as much when running as nightly + echo "### Rebuilding modules with quick, publishing to $baseDir/ivy/local" + buildTasks=(publish-local) + # buildScalacheck always uses publishPrivateTask (not buildTasks). we override it to avoid publishing to private-repo. 
+ publishPrivateTask="publish-local" + forceRebuild="yes" + buildModules +fi diff --git a/scripts/readproperties.awk b/scripts/readproperties.awk new file mode 100644 index 0000000000..96da94775b --- /dev/null +++ b/scripts/readproperties.awk @@ -0,0 +1,39 @@ +# Adapted from http://stackoverflow.com/questions/1682442/reading-java-properties-file-from-bash/2318840#2318840 +BEGIN { + FS="="; + n=""; + v=""; + c=0; # Not a line continuation. +} +/^\#/ { # The line is a comment. Breaks line continuation. + c=0; + next; +} +/\\$/ && (c==0) && (NF>=2) { # Name value pair with a line continuation... + e=index($0,"="); + n=substr($0,1,e-1); + v=substr($0,e+1,length($0) - e - 1); # Trim off the backslash. + c=1; # Line continuation mode. + next; +} +/^[^\\]+\\$/ && (c==1) { # Line continuation. Accumulate the value. + v= "" v substr($0,1,length($0)-1); + next; +} +((c==1) || (NF>=2)) && !/^[^\\]+\\$/ { # End of line continuation, or a single line name/value pair + if (c==0) { # Single line name/value pair + e=index($0,"="); + n=substr($0,1,e-1); + v=substr($0,e+1,length($0) - e); + } else { # Line continuation mode - last line of the value. + c=0; # Turn off line continuation mode. + v= "" v $0; + } + # Make sure the name is a legal shell variable name + gsub(/[^A-Za-z0-9_]/,"_",n); + # Silently drop everything that might confuse bash. 
+ gsub(/[\n\r\\\t'"\$!]/,"",v); + print "export " n "=\"" v "\" || echo \"Failed to set " n "\""; # don't make bash crap out when a property could not be parsed + n = ""; + v = ""; +} diff --git a/scripts/repositories-scala-release b/scripts/repositories-scala-release new file mode 100644 index 0000000000..00538a08ff --- /dev/null +++ b/scripts/repositories-scala-release @@ -0,0 +1,7 @@ +[repositories] + plugins: http://dl.bintray.com/sbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] + private-repo: http://private-repo.typesafe.com/typesafe/scala-release-temp/ + typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + sbt-plugin-releases: http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] + maven-central + local
\ No newline at end of file diff --git a/spec/06-expressions.md b/spec/06-expressions.md index afd1492744..5038ebb34d 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -410,6 +410,19 @@ The final result of the transformation is a block of the form } ``` +### Signature Polymorphic Methods + +For invocations of signature polymorphic methods of the target platform `$f$($e_1 , \ldots , e_m$)`, +the invoked function has a different method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$` at each call +site. The parameter types `$T_1 , \ldots , T_n$` are the types of the argument expressions +`$e_1 , \ldots , e_m$` and `$U$` is the expected type at the call site. If the expected type is +undefined then `$U$` is `scala.AnyRef`. The parameter names `$p_1 , \ldots , p_n$` are fresh. + +###### Note + +On the Java platform version 7 and later, the methods `invoke` and `invokeExact` in class +`java.lang.invoke.MethodHandle` are signature polymorphic. + ## Method Values ```ebnf diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index d9f56b47fa..06623b39cd 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1495,7 +1495,7 @@ abstract class GenICode extends SubComponent { if (!settings.optimise) { if (l.tpe <:< BoxedNumberClass.tpe) { if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum - else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar + else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030 else platform.externalEqualsNumObject } else platform.externalEquals } else { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala index 328ec8a033..a5f33aa786 100644 --- 
a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala @@ -162,4 +162,32 @@ final class BCodeAsmCommon[G <: Global](val global: G) { assoc.collectFirst { case (`nme`.value, LiteralAnnotArg(Constant(value: Symbol))) => value }).flatten.getOrElse(AnnotationRetentionPolicyClassValue) + + def implementedInterfaces(classSym: Symbol): List[Symbol] = { + // Additional interface parents based on annotations and other cues + def newParentForAnnotation(ann: AnnotationInfo): Option[Type] = ann.symbol match { + case RemoteAttr => Some(RemoteInterfaceClass.tpe) + case _ => None + } + + def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait + + val allParents = classSym.info.parents ++ classSym.annotations.flatMap(newParentForAnnotation) + + // We keep the superClass when computing minimizeParents to eliminate more interfaces. + // Example: T can be eliminated from D + // trait T + // class C extends T + // class D extends C with T + val interfaces = erasure.minimizeParents(allParents) match { + case superClass :: ifs if !isInterfaceOrTrait(superClass.typeSymbol) => + ifs + case ifs => + // minimizeParents removes the superclass if it's redundant, for example: + // trait A + // class C extends Object with A // minimizeParents removes Object + ifs + } + interfaces.map(_.typeSymbol) + } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index daf36ce374..062daa4eac 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -1225,7 +1225,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val equalsMethod: Symbol = { if (l.tpe <:< BoxedNumberClass.tpe) { if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum - else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar + 
else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030 else platform.externalEqualsNumObject } else platform.externalEquals } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 3b7cbd6392..2238221c83 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -114,7 +114,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { val superClass = if (superClassSym == NoSymbol) None else Some(classBTypeFromSymbol(superClassSym)) - val interfaces = getSuperInterfaces(classSym).map(classBTypeFromSymbol) + val interfaces = implementedInterfaces(classSym).map(classBTypeFromSymbol) val flags = javaFlags(classSym) @@ -182,28 +182,6 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { classBType } - /** - * All interfaces implemented by a class, except for those inherited through the superclass. 
- * - * TODO @lry share code with GenASM - */ - private def getSuperInterfaces(classSym: Symbol): List[Symbol] = { - - // Additional interface parents based on annotations and other cues - def newParentForAnnotation(ann: AnnotationInfo): Symbol = ann.symbol match { - case RemoteAttr => RemoteInterfaceClass - case _ => NoSymbol - } - - val superInterfaces0: List[Symbol] = classSym.mixinClasses - val superInterfaces = existingSymbols(superInterfaces0 ++ classSym.annotations.map(newParentForAnnotation)).distinct - - assert(!superInterfaces.contains(NoSymbol), s"found NoSymbol among: ${superInterfaces.mkString(", ")}") - assert(superInterfaces.forall(s => s.isInterface || s.isTrait), s"found non-interface among: ${superInterfaces.mkString(", ")}") - - erasure.minimizeInterfaces(superInterfaces.map(_.info)).map(_.typeSymbol) - } - private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = { assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym") diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index e56a20c2e7..7626df312e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -1206,22 +1206,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => def serialVUID: Option[Long] = genBCode.serialVUID(clasz.symbol) - private def getSuperInterfaces(c: IClass): Array[String] = { - - // Additional interface parents based on annotations and other cues - def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match { - case RemoteAttr => RemoteInterfaceClass - case _ => NoSymbol - } - - val ps = c.symbol.info.parents - val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses - val superInterfaces = existingSymbols(superInterfaces0 ++ c.symbol.annotations.map(newParentForAttr)).distinct - - if(superInterfaces.isEmpty) 
EMPTY_STRING_ARRAY - else mkArray(erasure.minimizeInterfaces(superInterfaces.map(_.info)).map(t => javaName(t.typeSymbol))) - } - var clasz: IClass = _ // this var must be assigned only by genClass() var jclass: asm.ClassWriter = _ // the classfile being emitted var thisName: String = _ // the internal name of jclass @@ -1242,7 +1226,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => val ps = c.symbol.info.parents val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol) - val ifaces = getSuperInterfaces(c) + val ifaces: Array[String] = implementedInterfaces(c.symbol).map(javaName)(collection.breakOut) val thisSignature = getGenericSignature(c.symbol, c.symbol.owner) val flags = mkFlags( diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 273112b93c..08f15438fe 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -55,7 +55,7 @@ class LocalOpt(settings: ScalaSettings) { * @return `true` if unreachable code was elminated in some method, `false` otherwise. 
*/ def methodOptimizations(clazz: ClassNode): Boolean = { - settings.Yopt.value.nonEmpty && clazz.methods.asScala.foldLeft(false) { + !settings.YoptNone && clazz.methods.asScala.foldLeft(false) { case (changed, method) => methodOptimizations(method, clazz.name) || changed } } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 18e639b81c..fc02f6ff56 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -242,6 +242,7 @@ trait ScalaSettings extends AbsScalaSettings descr = "Enable optimizations", domain = YoptChoices) + def YoptNone = Yopt.isSetByUser && Yopt.value.isEmpty def YoptUnreachableCode = !Yopt.isSetByUser || Yopt.contains(YoptChoices.unreachableCode) def YoptSimplifyJumps = Yopt.contains(YoptChoices.simplifyJumps) def YoptRecurseUnreachableJumps = Yopt.contains(YoptChoices.recurseUnreachableJumps) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index b6af19250e..79833e273d 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -185,22 +185,22 @@ abstract class Erasure extends AddInterfaces private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType] - /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents. + /* Drop redundant types (ones which are implemented by some other parent) from the immediate parents. * This is important on Android because there is otherwise an interface explosion. 
*/ - def minimizeInterfaces(lstIfaces: List[Type]): List[Type] = { - var rest = lstIfaces - var leaves = List.empty[Type] - while(!rest.isEmpty) { + def minimizeParents(parents: List[Type]): List[Type] = { + var rest = parents + var leaves = collection.mutable.ListBuffer.empty[Type] + while(rest.nonEmpty) { val candidate = rest.head val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol } if(!nonLeaf) { - leaves = candidate :: (leaves filterNot { t => candidate.typeSymbol isSubClass t.typeSymbol }) + leaves = leaves filterNot { t => candidate.typeSymbol isSubClass t.typeSymbol } + leaves += candidate } rest = rest.tail } - - leaves.reverse + leaves.toList } @@ -220,7 +220,7 @@ abstract class Erasure extends AddInterfaces case _ => tps } - val minParents = minimizeInterfaces(parents) + val minParents = minimizeParents(parents) val validParents = if (isTraitSignature) // java is unthrilled about seeing interfaces inherit from classes diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 75d2cfe0f2..da6527bc0b 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -122,9 +122,20 @@ trait Logic extends Debugging { // symbols are propositions final class Sym private[PropositionalLogic] (val variable: Var, val const: Const) extends Prop { + + override def equals(other: scala.Any): Boolean = other match { + case that: Sym => this.variable == that.variable && + this.const == that.const + case _ => false + } + + override def hashCode(): Int = { + variable.hashCode * 41 + const.hashCode + } + private val id: Int = Sym.nextSymId - override def toString = variable + "=" + const + "#" + id + override def toString = s"$variable=$const#$id" } object Sym { @@ -370,9 +381,11 @@ trait Logic extends Debugging { val EmptyModel: Model val NoModel: Model + final case class Solution(model: Model, 
unassigned: List[Sym]) + def findModelFor(solvable: Solvable): Model - def findAllModelsFor(solvable: Solvable): List[Model] + def findAllModelsFor(solvable: Solvable): List[Solution] } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 8650f6ef90..d3a5507273 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -6,6 +6,8 @@ package scala.tools.nsc.transform.patmat +import scala.annotation.tailrec +import scala.collection.immutable.{IndexedSeq, Iterable} import scala.language.postfixOps import scala.collection.mutable import scala.reflect.internal.util.Statistics @@ -266,7 +268,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT // the type of the binder passed to the first invocation // determines the type of the tree that'll be returned for that binder as of then final def binderToUniqueTree(b: Symbol) = - unique(accumSubst(normalize(CODE.REF(b))), b.tpe) + unique(accumSubst(normalize(gen.mkAttributedStableRef(b))), b.tpe) // note that the sequencing of operations is important: must visit in same order as match execution // binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders @@ -514,8 +516,16 @@ trait MatchAnalysis extends MatchApproximation { // find the models (under which the match fails) val matchFailModels = findAllModelsFor(propToSolvable(matchFails)) + val scrutVar = Var(prevBinderTree) - val counterExamples = matchFailModels.flatMap(modelToCounterExample(scrutVar)) + val counterExamples = { + matchFailModels.flatMap { + model => + val varAssignments = expandModel(model) + varAssignments.flatMap(modelToCounterExample(scrutVar) _) + } + } + // sorting before pruning is important here in order to // keep neg/t7020.scala stable // since e.g. 
List(_, _) would cover List(1, _) @@ -587,6 +597,8 @@ trait MatchAnalysis extends MatchApproximation { case object WildcardExample extends CounterExample { override def toString = "_" } case object NoExample extends CounterExample { override def toString = "??" } + // returns a mapping from variable to + // equal and notEqual symbols def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] = model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs => val (trues, falses) = xs.partition(_._2) @@ -600,20 +612,110 @@ trait MatchAnalysis extends MatchApproximation { v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment }.mkString("\n") - // return constructor call when the model is a true counter example - // (the variables don't take into account type information derived from other variables, - // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _), - // since we didn't realize the tail of the outer cons was a Nil) - def modelToCounterExample(scrutVar: Var)(model: Model): Option[CounterExample] = { + /** + * The models we get from the DPLL solver need to be mapped back to counter examples. + * However there's no precalculated mapping model -> counter example. Even worse, + * not every valid model corresponds to a valid counter example. + * The reason is that restricting the valid models further would for example require + * a quadratic number of additional clauses. So to keep the optimistic case fast + * (i.e., all cases are covered in a pattern match), the infeasible counter examples + * are filtered later. + * + * The DPLL procedure keeps the literals that do not contribute to the solution + * unassigned, e.g., for `(a \/ b)` + * only {a = true} or {b = true} is required and the other variable can have any value. + * + * This function does a smart expansion of the model and avoids models that + * have conflicting mappings. 
+ * + * For example for in case of the given set of symbols (taken from `t7020.scala`): + * "V2=2#16" + * "V2=6#19" + * "V2=5#18" + * "V2=4#17" + * "V2=7#20" + * + * One possibility would be to group the symbols by domain but + * this would only work for equality tests and would not be compatible + * with type tests. + * Another observation leads to a much simpler algorithm: + * Only one of these symbols can be set to true, + * since `V2` can at most be equal to one of {2,6,5,4,7}. + */ + def expandModel(solution: Solution): List[Map[Var, (Seq[Const], Seq[Const])]] = { + + val model = solution.model + // x1 = ... // x1.hd = ... // x1.tl = ... // x1.hd.hd = ... // ... val varAssignment = modelToVarAssignment(model) + debug.patmat("var assignment for model " + model + ":\n" + varAssignmentString(varAssignment)) + + // group symbols that assign values to the same variables (i.e., symbols are mutually exclusive) + // (thus the groups are sets of disjoint assignments to variables) + val groupedByVar: Map[Var, List[Sym]] = solution.unassigned.groupBy(_.variable) + + val expanded = for { + (variable, syms) <- groupedByVar.toList + } yield { + + val (equal, notEqual) = varAssignment.getOrElse(variable, Nil -> Nil) + + def addVarAssignment(equalTo: List[Const], notEqualTo: List[Const]) = { + Map(variable ->(equal ++ equalTo, notEqual ++ notEqualTo)) + } + + // this assignment is needed in case that + // there exists already an assign + val allNotEqual = addVarAssignment(Nil, syms.map(_.const)) - debug.patmat("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment)) + // this assignment is conflicting on purpose: + // a list counter example could contain wildcards: e.g. 
`List(_,_)` + val allEqual = addVarAssignment(syms.map(_.const), Nil) + if(equal.isEmpty) { + val oneHot = for { + s <- syms + } yield { + addVarAssignment(List(s.const), syms.filterNot(_ == s).map(_.const)) + } + allEqual :: allNotEqual :: oneHot + } else { + allEqual :: allNotEqual :: Nil + } + } + + if (expanded.isEmpty) { + List(varAssignment) + } else { + // we need the cartesian product here, + // since we want to report all missing cases + // (i.e., combinations) + val cartesianProd = expanded.reduceLeft((xs, ys) => + for {map1 <- xs + map2 <- ys} yield { + map1 ++ map2 + }) + + // add expanded variables + // note that we can just use `++` + // since the Maps have disjoint keySets + for { + m <- cartesianProd + } yield { + varAssignment ++ m + } + } + } + + // return constructor call when the model is a true counter example + // (the variables don't take into account type information derived from other variables, + // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _), + // since we didn't realize the tail of the outer cons was a Nil) + def modelToCounterExample(scrutVar: Var)(varAssignment: Map[Var, (Seq[Const], Seq[Const])]): Option[CounterExample] = { // chop a path into a list of symbols def chop(path: Tree): List[Symbol] = path match { case Ident(_) => List(path.symbol) @@ -742,7 +844,7 @@ trait MatchAnalysis extends MatchApproximation { // then we can safely ignore these counter examples since we will eventually encounter // both counter examples separately case _ if inSameDomain => None - + // not a valid counter-example, possibly since we have a definite type but there was a field mismatch // TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive case _ => Some(NoExample) @@ -761,12 +863,12 @@ trait MatchAnalysis extends MatchApproximation { } def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = { - if 
(!suppression.unreachable) { + if (!suppression.suppressUnreachable) { unreachableCase(prevBinder, cases, pt) foreach { caseIndex => reportUnreachable(cases(caseIndex).last.pos) } } - if (!suppression.exhaustive) { + if (!suppression.suppressExhaustive) { val counterExamples = exhaustive(prevBinder, cases, pt) if (counterExamples.nonEmpty) reportMissingCases(prevBinder.pos, counterExamples) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 22661d6ccf..0678ec52e7 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -208,7 +208,7 @@ trait MatchTranslation { case _ => (cases, None) } - checkMatchVariablePatterns(nonSyntheticCases) + if (!settings.XnoPatmatAnalysis) checkMatchVariablePatterns(nonSyntheticCases) // we don't transform after uncurry // (that would require more sophistication when generating trees, @@ -248,7 +248,10 @@ trait MatchTranslation { if (caseDefs forall treeInfo.isCatchCase) caseDefs else { val swatches = { // switch-catches - val bindersAndCases = caseDefs map { caseDef => + // SI-7459 must duplicate here as we haven't commited to switch emission, and just figuring out + // if we can ends up mutating `caseDefs` down in the use of `substituteSymbols` in + // `TypedSubstitution#Substitution`. That is called indirectly by `emitTypeSwitch`. 
+ val bindersAndCases = caseDefs.map(_.duplicate) map { caseDef => // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there) // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this) val caseScrutSym = freshSym(pos, pureType(ThrowableTpe)) @@ -518,7 +521,7 @@ trait MatchTranslation { // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component override protected def tupleSel(binder: Symbol)(i: Int): Tree = { val accessors = binder.caseFieldAccessors - if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1) + if (accessors isDefinedAt (i-1)) gen.mkAttributedStableRef(binder) DOT accessors(i-1) else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 3fd9ce76f8..971a019e4e 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -21,9 +21,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { import global._ import definitions._ - final case class Suppression(exhaustive: Boolean, unreachable: Boolean) + final case class Suppression(suppressExhaustive: Boolean, suppressUnreachable: Boolean) object Suppression { val NoSuppression = Suppression(false, false) + val FullSuppression = Suppression(true, true) } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -166,8 +167,17 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { val usedBinders = new mutable.HashSet[Symbol]() // all potentially stored subpat binders val 
potentiallyStoredBinders = stored.unzip._1.toSet + def ref(sym: Symbol) = + if (potentiallyStoredBinders(sym)) usedBinders += sym // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders - in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol) + in.foreach { + case tt: TypeTree => + tt.tpe foreach { // SI-7459 e.g. case Prod(t) => new t.u.Foo + case SingleType(_, sym) => ref(sym) + case _ => + } + case t => ref(t.symbol) + } if (usedBinders.isEmpty) in else { @@ -542,7 +552,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}"))) val (suppression, requireSwitch): (Suppression, Boolean) = - if (settings.XnoPatmatAnalysis) (Suppression.NoSuppression, false) + if (settings.XnoPatmatAnalysis) (Suppression.FullSuppression, false) else scrut match { case Typed(tree, tpt) => val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass @@ -574,7 +584,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { (Suppression.NoSuppression, false) } - emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{ + emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, unchecked = suppression.suppressExhaustive).getOrElse{ if (requireSwitch) reporter.warning(scrut.pos, "could not emit switch for @switch annotated match") if (casesNoSubstOnly nonEmpty) { diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index ef50e083a1..d35aad964d 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -12,7 +12,7 @@ import scala.language.postfixOps import scala.tools.nsc.transform.TypingTransformers import scala.tools.nsc.transform.Transform import 
scala.reflect.internal.util.Statistics -import scala.reflect.internal.Types +import scala.reflect.internal.{Mode, Types} import scala.reflect.internal.util.Position /** Translate pattern matching. @@ -198,33 +198,57 @@ trait Interface extends ast.TreeDSL { } class Substitution(val from: List[Symbol], val to: List[Tree]) { - import global.{Transformer, Ident, NoType} + import global.{Transformer, Ident, NoType, TypeTree, SingleType} // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed, // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees. def apply(tree: Tree): Tree = { // according to -Ystatistics 10% of translateMatch's time is spent in this method... // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst - if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree - else (new Transformer { + val toIdents = to.forall(_.isInstanceOf[Ident]) + val containsSym = tree.exists { + case i@Ident(_) => from contains i.symbol + case tt: TypeTree => tt.tpe.exists { + case SingleType(_, sym) => + (from contains sym) && { + if (!toIdents) global.devWarning(s"Unexpected substitution of non-Ident into TypeTree `$tt`, subst= $this") + true + } + case _ => false + } + case _ => false + } + val toSyms = to.map(_.symbol) + object substIdentsForTrees extends Transformer { private def typedIfOrigTyped(to: Tree, origTp: Type): Tree = if (origTp == null || origTp == NoType) to // important: only type when actually substing and when original tree was typed // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors) else typer.typed(to) + def typedStable(t: Tree) = typer.typed(t.shallowDuplicate, Mode.MonoQualifierModes | Mode.TYPEPATmode) + lazy val toTypes: List[Type] = to map 
(tree => typedStable(tree).tpe) + override def transform(tree: Tree): Tree = { def subst(from: List[Symbol], to: List[Tree]): Tree = if (from.isEmpty) tree - else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate.setPos(tree.pos), tree.tpe) + else if (tree.symbol == from.head) typedIfOrigTyped(typedStable(to.head).setPos(tree.pos), tree.tpe) else subst(from.tail, to.tail) - tree match { + val tree1 = tree match { case Ident(_) => subst(from, to) case _ => super.transform(tree) } + tree1.modifyType(_.substituteTypes(from, toTypes)) } - }).transform(tree) + } + if (containsSym) { + if (to.forall(_.isInstanceOf[Ident])) + tree.duplicate.substituteSymbols(from, to.map(_.symbol)) // SI-7459 catches `case t => new t.Foo` + else + substIdentsForTrees.transform(tree) + } + else tree } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index 1ba13c0617..27217f0dc2 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -288,7 +288,7 @@ trait Solving extends Logic { val NoTseitinModel: TseitinModel = null // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??) - def findAllModelsFor(solvable: Solvable): List[Model] = { + def findAllModelsFor(solvable: Solvable): List[Solution] = { debug.patmat("find all models for\n"+ cnfString(solvable.cnf)) // we must take all vars from non simplified formula @@ -305,54 +305,12 @@ trait Solving extends Logic { relevantLits.map(lit => -lit) } - /** - * The DPLL procedure only returns a minimal mapping from literal to value - * such that the CNF formula is satisfied. - * E.g. for: - * `(a \/ b)` - * The DPLL procedure will find either {a = true} or {b = true} - * as solution. 
- * - * The expansion step will amend both solutions with the unassigned variable - * i.e., {a = true} will be expanded to {a = true, b = true} and {a = true, b = false}. - */ - def expandUnassigned(unassigned: List[Int], model: TseitinModel): List[TseitinModel] = { - // the number of solutions is doubled for every unassigned variable - val expandedModels = 1 << unassigned.size - var current = mutable.ArrayBuffer[TseitinModel]() - var next = mutable.ArrayBuffer[TseitinModel]() - current.sizeHint(expandedModels) - next.sizeHint(expandedModels) - - current += model - - // we use double buffering: - // read from `current` and create a two models for each model in `next` - for { - s <- unassigned - } { - for { - model <- current - } { - def force(l: Lit) = model + l - - next += force(Lit(s)) - next += force(Lit(-s)) - } - - val tmp = current - current = next - next = tmp - - next.clear() - } - - current.toList + final case class TseitinSolution(model: TseitinModel, unassigned: List[Int]) { + def projectToSolution(symForVar: Map[Int, Sym]) = Solution(projectToModel(model, symForVar), unassigned map symForVar) } - def findAllModels(clauses: Array[Clause], - models: List[TseitinModel], - recursionDepthAllowed: Int = global.settings.YpatmatExhaustdepth.value): List[TseitinModel]= + models: List[TseitinSolution], + recursionDepthAllowed: Int = global.settings.YpatmatExhaustdepth.value): List[TseitinSolution]= if (recursionDepthAllowed == 0) { val maxDPLLdepth = global.settings.YpatmatExhaustdepth.value reportWarning("(Exhaustivity analysis reached max recursion depth, not all missing cases are reported. 
" + @@ -368,17 +326,15 @@ trait Solving extends Logic { val unassigned: List[Int] = (relevantVars -- model.map(lit => lit.variable)).toList debug.patmat("unassigned "+ unassigned +" in "+ model) - val forced = expandUnassigned(unassigned, model) - debug.patmat("forced "+ forced) + val solution = TseitinSolution(model, unassigned) val negated = negateModel(model) - findAllModels(clauses :+ negated, forced ++ models, recursionDepthAllowed - 1) + findAllModels(clauses :+ negated, solution :: models, recursionDepthAllowed - 1) } else models } - val tseitinModels: List[TseitinModel] = findAllModels(solvable.cnf, Nil) - val models: List[Model] = tseitinModels.map(projectToModel(_, solvable.symbolMapping.symForVar)) - models + val tseitinSolutions = findAllModels(solvable.cnf, Nil) + tseitinSolutions.map(_.projectToSolution(solvable.symbolMapping.symForVar)) } private def withLit(res: TseitinModel, l: Lit): TseitinModel = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index aaa75b5ee1..5719a9e358 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3281,6 +3281,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } handleOverloaded + case _ if isPolymorphicSignature(fun.symbol) => + // Mimic's Java's treatment of polymorphic signatures as described in + // https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3 + // + // One can think of these methods as being infinitely overloaded. 
We create + // a ficticious new cloned method symbol for each call site that takes on a signature + // governed by a) the argument types and b) the expected type + val args1 = typedArgs(args, forArgMode(fun, mode)) + val pts = args1.map(_.tpe.deconst) + val clone = fun.symbol.cloneSymbol + val cloneParams = pts map (pt => clone.newValueParameter(currentUnit.freshTermName()).setInfo(pt)) + val resultType = if (isFullyDefined(pt)) pt else ObjectTpe + clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType)) + val fun1 = fun.setSymbol(clone).setType(clone.info) + doTypedApply(tree, fun1, args1, mode, resultType).setType(resultType) + case mt @ MethodType(params, _) => val paramTypes = mt.paramTypes // repeat vararg as often as needed, remove by-name @@ -3776,8 +3792,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case TypeRef(pre, sym, args) => if (sym.isAliasType && containsLocal(tp) && (tp.dealias ne tp)) apply(tp.dealias) else { - if (pre.isVolatile) - InferTypeWithVolatileTypeSelectionError(tree, pre) + if (pre.isVolatile) pre match { + case SingleType(_, sym) if sym.isSynthetic && isPastTyper => + debuglog(s"ignoring volatility of prefix in pattern matcher generated inferred type: $tp") // See pos/t7459c.scala + case _ => + InferTypeWithVolatileTypeSelectionError(tree, pre) + } mapOver(tp) } case _ => diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 5d00141e6a..4476697cfd 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -78,7 +78,11 @@ trait InteractiveAnalyzer extends Analyzer { val owningInfo = sym.owner.info val existingDerivedSym = owningInfo.decl(sym.name.toTermName).filter(sym => sym.isSynthetic && sym.isMethod) existingDerivedSym.alternatives foreach (owningInfo.decls.unlink) - enterImplicitWrapper(tree.asInstanceOf[ClassDef]) + val 
defTree = tree match { + case dd: DocDef => dd.definition // See SI-9011, Scala IDE's presentation compiler incorporates ScalaDocGlobal with InterativeGlobal, so we have to unwrap DocDefs. + case _ => tree + } + enterImplicitWrapper(defTree.asInstanceOf[ClassDef]) } super.enterExistingSym(sym, tree) } diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index e11d1b35d7..c4aa511cd7 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -121,7 +121,8 @@ abstract class Enumeration (initial: Int) extends Serializable { * @throws NoSuchElementException if no `Value` with a matching * name is in this `Enumeration` */ - final def withName(s: String): Value = values.find(_.toString == s).get + final def withName(s: String): Value = values.find(_.toString == s).getOrElse( + throw new NoSuchElementException(s"No value found for '$s'")) /** Creates a fresh value, part of this enumeration. */ protected final def Value: Value = Value(nextId) diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index 8f77114746..011fd415ee 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -128,7 +128,7 @@ class ArrayBuffer[A](override protected val initialSize: Int) override def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this } /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a + * `update`, this method will not replace an element with a new * one. Instead, it will insert a new element at index `n`. * * @param n the index where a new element will be inserted. 
@@ -137,12 +137,13 @@ class ArrayBuffer[A](override protected val initialSize: Int) */ def insertAll(n: Int, seq: Traversable[A]) { if (n < 0 || n > size0) throw new IndexOutOfBoundsException(n.toString) - val xs = seq.toList - val len = xs.length - ensureSize(size0 + len) + val len = seq.size + val newSize = size0 + len + ensureSize(newSize) + copy(n, n + len, size0 - n) - xs.copyToArray(array.asInstanceOf[scala.Array[Any]], n) - size0 += len + seq.copyToArray(array.asInstanceOf[Array[Any]], n) + size0 = newSize } /** Removes the element on a given index position. It takes time linear in diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala index 74c3e06839..9f0b56b4fe 100644 --- a/src/library/scala/io/Source.scala +++ b/src/library/scala/io/Source.scala @@ -169,9 +169,20 @@ object Source { createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec) } -/** The class `Source` implements an iterable representation of source data. - * Calling method `reset` returns an identical, resetted source, where - * possible. +/** An iterable representation of source data. + * It may be reset with the optional `reset` method. + * + * Subclasses must supply [[scala.io.Source@iter the underlying iterator]]. + * + * Error handling may be customized by overriding the [[scala.io.Source@report report]] method. + * + * The [[scala.io.Source@ch current input]] and [[scala.io.Source@pos position]], + * as well as the [[scala.io.Source@next next character]] methods delegate to + * [[scala.io.Source$Positioner the positioner]]. + * + * The default positioner encodes line and column numbers in the position passed to `report`. + * This behavior can be changed by supplying a + * [[scala.io.Source@withPositioning(pos:Source.this.Positioner):Source.this.type custom positioner]]. 
* * @author Burak Emir * @version 1.0 diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 7e2d124486..4263bbccf3 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -514,6 +514,8 @@ trait Definitions extends api.StandardDefinitions { lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature] lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature] + lazy val MethodHandle = getClassIfDefined("java.lang.invoke.MethodHandle") + // Option classes lazy val OptionClass: ClassSymbol = requiredClass[Option[_]] lazy val OptionModule: ModuleSymbol = requiredModule[scala.Option.type] @@ -1508,6 +1510,9 @@ trait Definitions extends api.StandardDefinitions { lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest) lazy val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass) + + def isPolymorphicSignature(sym: Symbol) = PolySigMethods(sym) + private lazy val PolySigMethods: Set[Symbol] = Set[Symbol](MethodHandle.info.decl(sn.Invoke), MethodHandle.info.decl(sn.InvokeExact)).filter(_.exists) } } } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 667ff7c4b4..c94ee996e4 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -1147,6 +1147,7 @@ trait StdNames { final val GetClassLoader: TermName = newTermName("getClassLoader") final val GetMethod: TermName = newTermName("getMethod") final val Invoke: TermName = newTermName("invoke") + final val InvokeExact: TermName = newTermName("invokeExact") val Boxed = immutable.Map[TypeName, TypeName]( tpnme.Boolean -> BoxedBoolean, diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala 
b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala index 8615e34fad..241638e88e 100644 --- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala +++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala @@ -196,10 +196,10 @@ object ByteCodecs { * * Sometimes returns (length+1) of the decoded array. Example: * - * scala> val enc = scala.reflect.generic.ByteCodecs.encode(Array(1,2,3)) + * scala> val enc = scala.reflect.internal.pickling.ByteCodecs.encode(Array(1,2,3)) * enc: Array[Byte] = Array(2, 5, 13, 1) * - * scala> scala.reflect.generic.ByteCodecs.decode(enc) + * scala> scala.reflect.internal.pickling.ByteCodecs.decode(enc) * res43: Int = 4 * * scala> enc diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index c87b810bdd..1c0aa7cf6d 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -310,6 +310,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.QuasiquoteClass_api_unapply definitions.ScalaSignatureAnnotation definitions.ScalaLongSignatureAnnotation + definitions.MethodHandle definitions.OptionClass definitions.OptionModule definitions.SomeClass diff --git a/test/files/jvm/t9044.scala b/test/files/jvm/t9044.scala new file mode 100644 index 0000000000..b1073325e8 --- /dev/null +++ b/test/files/jvm/t9044.scala @@ -0,0 +1,6 @@ +trait A +trait B +object Test extends A with B with App { + val is = Test.getClass.getInterfaces.mkString(", ") + assert(is == "interface A, interface B, interface scala.App", is) +} diff --git a/test/files/pos/patmat-suppress.flags b/test/files/pos/patmat-suppress.flags new file mode 100644 index 0000000000..a988a5b807 --- /dev/null +++ b/test/files/pos/patmat-suppress.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xno-patmat-analysis
\ No newline at end of file diff --git a/test/files/pos/patmat-suppress.scala b/test/files/pos/patmat-suppress.scala new file mode 100644 index 0000000000..7c8aded690 --- /dev/null +++ b/test/files/pos/patmat-suppress.scala @@ -0,0 +1,159 @@ +// test that none of these warn due to -Xno-patmat-analysis +// tests taken from test/files/neg/patmatexhaust.scala, test/files/neg/pat_unreachable.scala +class TestSealedExhaustive { // compile only + sealed abstract class Foo + + case class Bar(x:Int) extends Foo + case object Baz extends Foo + + def ma1(x:Foo) = x match { + case Bar(_) => // not exhaustive + } + + def ma2(x:Foo) = x match { + case Baz => // not exhaustive + } + + sealed abstract class Mult + case class Kult(s:Mult) extends Mult + case class Qult() extends Mult + + def ma33(x:Kult) = x match { // exhaustive + case Kult(_) => // exhaustive + } + + def ma3(x:Mult) = (x,x) match { // not exhaustive + case (Kult(_), Qult()) => // Kult missing + //case (Kult(_), Kult(_)) => + case (Qult(), Kult(_)) => // Qult missing + //case (Qult(), Qult()) => + } + + def ma3u(x:Mult) = ((x,x) : @unchecked) match { // not exhaustive, but not checked! + case (Kult(_), Qult()) => + case (Qult(), Kult(_)) => + } + + sealed abstract class Deep + + case object Ga extends Deep + sealed class Gp extends Deep + case object Gu extends Gp + + def zma3(x:Deep) = x match { // exhaustive! + case _ => + } + def zma4(x:Deep) = x match { // exhaustive! 
+ case Ga => + case _ => + } + + def ma4(x:Deep) = x match { // missing cases: Gu, Gp which is not abstract so must be included + case Ga => + } + + def ma5(x:Deep) = x match { + case Gu => + case _ if 1 == 0 => + case Ga => + } + + def ma6() = List(1,2) match { // give up + case List(1,2) => + case x :: xs => + } + + def ma7() = List(1,2) match { //exhaustive + case 1::2::Nil => + case _ => + } + + sealed class B + case class B1() extends B + case object B2 extends B + def ma8(x: B) = x match { + case _: B => true + } + def ma9(x: B) = x match { + case B1() => true // missing B, which is not abstract so must be included + case B2 => true + } + + object ob1 { + sealed abstract class C + sealed abstract class C1 extends C + object C2 extends C + case class C3() extends C + case object C4 extends C + + def ma10(x: C) = x match { // exhaustive: abstract sealed C1 is dead end. + case C3() => true + case C2 | C4 => true + } + } + + object ob2 { + sealed abstract class C + abstract class C1 extends C + object C2 extends C + case class C3() extends C + case object C4 extends C + + def ma10(x: C) = x match { // not exhaustive: C1 is not sealed. + case C3() => true + case C2 | C4 => true + } + } + object ob3 { + sealed abstract class C + sealed abstract class C1 extends C + object D1 extends C1 + case class D2() extends C1 + object C2 extends C + case class C3() extends C + case object C4 extends C + + def ma10(x: C) = x match { // not exhaustive: C1 has subclasses. + case C3() => true + case C2 | C4 => true + } + } + object ob4 { + sealed abstract class C + sealed class C1 extends C + object C2 extends C + case class C3() extends C + case object C4 extends C + + def ma10(x: C) = x match { // not exhaustive: C1 is not abstract. + case C3() => true + case C2 | C4 => true + } + } +} + +object TestUnreachable extends App { + def unreachable1(xs:Seq[Char]) = xs match { + case Seq(x, y, _*) => x::y::Nil + case Seq(x, y, z, w) => List(z,w) // redundant! 
+ } + def unreachable2(xs:Seq[Char]) = xs match { + case Seq(x, y, _*) => x::y::Nil + case Seq(x, y) => List(x, y) + } + + def not_unreachable(xs:Seq[Char]) = xs match { + case Seq(x, y, _*) => x::y::Nil + case Seq(x) => List(x) + } + def not_unreachable2(xs:Seq[Char]) = xs match { + case Seq(x, y) => x::y::Nil + case Seq(x, y, z, _*) => List(x,y) + } + + def contrivedExample[A, B, C](a: A, b: B, c: C): Unit = a match { + case b => println("matched b") + case c => println("matched c") + case _ => println("matched neither") + } +} diff --git a/test/files/pos/t7459a.scala b/test/files/pos/t7459a.scala new file mode 100644 index 0000000000..5107715e06 --- /dev/null +++ b/test/files/pos/t7459a.scala @@ -0,0 +1,18 @@ +trait SpecialException extends Throwable + +object Test { + def run() { + try { + ??? + } catch { + case e: SpecialException => e.isInstanceOf[SpecialException] + case e => + } + + // OKAY + // (null: Throwable) match { + // case e: SpecialException => e.isInstanceOf[SpecialException] + // case e => + // } + } +}
\ No newline at end of file diff --git a/test/files/pos/t7459b.scala b/test/files/pos/t7459b.scala new file mode 100644 index 0000000000..a4b4fd07a9 --- /dev/null +++ b/test/files/pos/t7459b.scala @@ -0,0 +1,12 @@ +import scala.concurrent._ +import scala.util._ + + +class Test { + (null: Any) match { + case s @ Some(_) => ??? + case f @ _ => + () => f + ??? + } +}
\ No newline at end of file diff --git a/test/files/pos/t7459c.scala b/test/files/pos/t7459c.scala new file mode 100644 index 0000000000..dc2605abe6 --- /dev/null +++ b/test/files/pos/t7459c.scala @@ -0,0 +1,18 @@ +object Test { + trait Universe { + type Type + type TypeTag[A] >: Null <: TypeTagApi[A] + trait TypeTagApi[A] { def tpe: Type } + } + trait JavaUniverse extends Universe + + trait Mirror[U <: Universe] { + def universe: U + } + (null: Mirror[_]).universe match { + case ju: JavaUniverse => + val ju1 = ju + val f = {() => (null: ju.TypeTag[Nothing]).tpe } + } + trait M[A] +} diff --git a/test/files/pos/t7459d.scala b/test/files/pos/t7459d.scala new file mode 100644 index 0000000000..7843156885 --- /dev/null +++ b/test/files/pos/t7459d.scala @@ -0,0 +1,8 @@ +class Test { + (null: Any) match { + case s @ Some(_) => ??? + case f @ _ => + () => f + ??? + } +} diff --git a/test/files/pos/t7704.scala b/test/files/pos/t7704.scala new file mode 100644 index 0000000000..cae88d3324 --- /dev/null +++ b/test/files/pos/t7704.scala @@ -0,0 +1,10 @@ +class Attr { type V ; class Val } +class StrAttr extends Attr { type V = String } +class BoolAttr extends Attr { type V = Boolean } + +object Main { + def f(x: Attr) = x match { + case v: StrAttr => new v.Val + case v: BoolAttr => new v.Val + } +} diff --git a/test/files/pos/t8999.flags b/test/files/pos/t8999.flags new file mode 100644 index 0000000000..0f96f1f872 --- /dev/null +++ b/test/files/pos/t8999.flags @@ -0,0 +1 @@ +-nowarn
\ No newline at end of file diff --git a/test/files/pos/t8999.scala b/test/files/pos/t8999.scala new file mode 100644 index 0000000000..99c4b2ad84 --- /dev/null +++ b/test/files/pos/t8999.scala @@ -0,0 +1,271 @@ +object Types { + + abstract sealed class Type + + case object AnyType extends Type + + case object NothingType extends Type + + case object UndefType extends Type + + case object BooleanType extends Type + + case object IntType extends Type + + case object LongType extends Type + + case object FloatType extends Type + + case object DoubleType extends Type + + case object StringType extends Type + + case object NullType extends Type + + sealed abstract class ReferenceType extends Type + + final case class ClassType(className: String) extends ReferenceType + + final case class ArrayType(baseClassName: String, dimensions: Int) extends ReferenceType + + final case class RecordType(fields: List[RecordType.Field]) extends Type + + object RecordType { + final case class Field(name: String, originalName: Option[String], + tpe: Type, mutable: Boolean) + } + + case object NoType extends Type + +} + + +sealed abstract class ClassKind + +object ClassKind { + + case object Class extends ClassKind + + case object ModuleClass extends ClassKind + + case object Interface extends ClassKind + + case object RawJSType extends ClassKind + + case object HijackedClass extends ClassKind + + case object TraitImpl extends ClassKind + +} + +object Trees { + + import Types._ + + abstract sealed class Tree + + case object EmptyTree extends Tree + + sealed trait PropertyName + case class Ident(name: String, originalName: Option[String]) extends PropertyName + object Ident { + def apply(name: String): Ident = + new Ident(name, Some(name)) + } + + case class VarDef(name: Ident, vtpe: Type, mutable: Boolean, rhs: Tree) extends Tree + + case class ParamDef(name: Ident, ptpe: Type, mutable: Boolean) extends Tree + + case class Skip() extends Tree + + class Block private(val stats: 
List[Tree]) extends Tree + + object Block { + def unapply(block: Block): Some[List[Tree]] = Some(block.stats) + } + + case class Labeled(label: Ident, tpe: Type, body: Tree) extends Tree + + case class Assign(lhs: Tree, rhs: Tree) extends Tree + + case class Return(expr: Tree, label: Option[Ident] = None) extends Tree + + case class If(cond: Tree, thenp: Tree, elsep: Tree) extends Tree + + case class While(cond: Tree, body: Tree, label: Option[Ident] = None) extends Tree + + case class DoWhile(body: Tree, cond: Tree, label: Option[Ident] = None) extends Tree + + case class Try(block: Tree, errVar: Ident, handler: Tree, finalizer: Tree) extends Tree + + case class Throw(expr: Tree) extends Tree + + case class Continue(label: Option[Ident] = None) extends Tree + + case class Match(selector: Tree, cases: List[(List[Literal], Tree)], default: Tree) extends Tree + + case class Debugger() extends Tree + + case class New(cls: ClassType, ctor: Ident, args: List[Tree]) extends Tree + + case class LoadModule(cls: ClassType) extends Tree + + case class StoreModule(cls: ClassType, value: Tree) extends Tree + + case class Select(qualifier: Tree, item: Ident, mutable: Boolean) extends Tree + + case class Apply(receiver: Tree, method: Ident, args: List[Tree]) extends Tree + + case class StaticApply(receiver: Tree, cls: ClassType, method: Ident, args: List[Tree]) extends Tree + + case class TraitImplApply(impl: ClassType, method: Ident, args: List[Tree]) extends Tree + + case class UnaryOp(op: Int, lhs: Tree) extends Tree + + case class BinaryOp(op: Int, lhs: Tree, rhs: Tree) extends Tree + + case class NewArray(tpe: ArrayType, lengths: List[Tree]) extends Tree + + case class ArrayValue(tpe: ArrayType, elems: List[Tree]) extends Tree + + case class ArrayLength(array: Tree) extends Tree + + case class ArraySelect(array: Tree, index: Tree) extends Tree + + case class RecordValue(tpe: RecordType, elems: List[Tree]) extends Tree + + case class IsInstanceOf(expr: Tree, cls: 
ReferenceType) extends Tree + + case class AsInstanceOf(expr: Tree, cls: ReferenceType) extends Tree + + case class Unbox(expr: Tree, charCode: Char) extends Tree + + case class GetClass(expr: Tree) extends Tree + + case class CallHelper(helper: String, args: List[Tree]) extends Tree + + case class JSNew(ctor: Tree, args: List[Tree]) extends Tree + + case class JSDotSelect(qualifier: Tree, item: Ident) extends Tree + + case class JSBracketSelect(qualifier: Tree, item: Tree) extends Tree + + case class JSFunctionApply(fun: Tree, args: List[Tree]) extends Tree + + case class JSDotMethodApply(receiver: Tree, method: Ident, args: List[Tree]) extends Tree + + case class JSBracketMethodApply(receiver: Tree, method: Tree, args: List[Tree]) extends Tree + + case class JSDelete(prop: Tree) extends Tree + + case class JSUnaryOp(op: String, lhs: Tree) extends Tree + + case class JSBinaryOp(op: String, lhs: Tree, rhs: Tree) extends Tree + + case class JSArrayConstr(items: List[Tree]) extends Tree + + case class JSObjectConstr(fields: List[(PropertyName, Tree)]) extends Tree + + case class JSEnvInfo() extends Tree + + sealed trait Literal extends Tree + + case class Undefined() extends Literal + + case class UndefinedParam() extends Literal + + case class Null() extends Literal + + case class BooleanLiteral(value: Boolean) extends Literal + + case class IntLiteral(value: Int) extends Literal + + case class LongLiteral(value: Long) extends Literal + + case class FloatLiteral(value: Float) extends Literal + + case class DoubleLiteral(value: Double) extends Literal + + case class StringLiteral(value: String) extends Literal with PropertyName + + case class ClassOf(cls: ReferenceType) extends Literal + + case class VarRef(ident: Ident, mutable: Boolean) extends Tree + + case class This() extends Tree + + case class Closure(captureParams: List[ParamDef], params: List[ParamDef], + body: Tree, captureValues: List[Tree]) extends Tree + + case class ClassDef(name: Ident, kind: 
ClassKind, parent: Option[Ident], ancestors: List[Ident], defs: List[Tree]) extends Tree + + case class MethodDef(name: PropertyName, args: List[ParamDef], resultType: Type, body: Tree) extends Tree + + case class PropertyDef(name: PropertyName, getterBody: Tree, setterArg: ParamDef, setterBody: Tree) extends Tree + + case class ConstructorExportDef(name: String, args: List[ParamDef], body: Tree) extends Tree + + case class ModuleExportDef(fullName: String) extends Tree + + final class TreeHash(val treeHash: Array[Byte], val posHash: Array[Byte]) +} + +object Main { + import Trees._ + import Types._ + + private def transform(tree: Tree) = { + val ObjectClass = "O" + tree match { + case VarDef(_, _, _, rhs) => + case tree: Block => + case Labeled(ident@Ident(label, _), tpe, body) => + case Assign(lhs, rhs) => + case Return(expr, optLabel) => + case If(cond, thenp, elsep) => + case While(cond, body, optLabel) => + case DoWhile(body, cond, None) => + case Try(block, errVar, EmptyTree, finalizer) => + case Try(block, errVar@Ident(name, originalName), handler, finalizer) => + case Throw(expr) => + case Continue(optLabel) => + case Match(selector, cases, default) => + case New(cls, ctor, args) => + case StoreModule(cls, value) => + case tree: Select => + case tree: Apply => + case tree: StaticApply => + case tree: TraitImplApply => + case tree@UnaryOp(_, arg) => + case tree@BinaryOp(op, lhs, rhs) => + case NewArray(tpe, lengths) => + case ArrayValue(tpe, elems) => + case ArrayLength(array) => + case ArraySelect(array, index) => + case RecordValue(tpe, elems) => + case IsInstanceOf(expr, ClassType(ObjectClass)) => + case IsInstanceOf(expr, tpe) => + case AsInstanceOf(expr, ClassType(ObjectClass)) => + case AsInstanceOf(expr, cls) => + case Unbox(arg, charCode) => + case GetClass(expr) => + case JSNew(ctor, args) => + case JSDotSelect(qualifier, item) => + case JSBracketSelect(qualifier, item) => + case tree: JSFunctionApply => + case JSDotMethodApply(receiver, method, 
args) => + case JSBracketMethodApply(receiver, method, args) => + case JSDelete(JSDotSelect(obj, prop)) => + case JSDelete(JSBracketSelect(obj, prop)) => + case JSUnaryOp(op, lhs) => + case JSBinaryOp(op, lhs, rhs) => + case JSArrayConstr(items) => + case JSObjectConstr(fields) => + case _: VarRef | _: This => + case Closure(captureParams, params, body, captureValues) => + case _: Skip | _: Debugger | _: LoadModule | + _: JSEnvInfo | _: Literal | EmptyTree => + } + } +}
\ No newline at end of file diff --git a/test/files/run/t7459a.scala b/test/files/run/t7459a.scala new file mode 100644 index 0000000000..e9653c6e79 --- /dev/null +++ b/test/files/run/t7459a.scala @@ -0,0 +1,14 @@ +class LM { + class Node[B1] + case class CC(n: LM) + + // crash + val f: (LM => Any) = { + case tttt => + new tttt.Node[Any]() + } +} + +object Test extends App { + new LM().f(new LM()) +} diff --git a/test/files/run/t7459b-optimize.flags b/test/files/run/t7459b-optimize.flags new file mode 100644 index 0000000000..49d036a887 --- /dev/null +++ b/test/files/run/t7459b-optimize.flags @@ -0,0 +1 @@ +-optimize diff --git a/test/files/run/t7459b-optimize.scala b/test/files/run/t7459b-optimize.scala new file mode 100644 index 0000000000..605890962c --- /dev/null +++ b/test/files/run/t7459b-optimize.scala @@ -0,0 +1,21 @@ +class LM { + class Node[B1] + + // crash + val g: (CC => Any) = { + case CC(tttt) => + new tttt.Node[Any]() + } + + val h: (Some[CC] => Any) = { + case Some(CC(tttt)) => + new tttt.Node[Any]() + } +} + +object Test extends App { + new LM().g(new CC(new LM())) + new LM().h(Some(new CC(new LM()))) +} +case class CC(n: LM) + diff --git a/test/files/run/t7459b.scala b/test/files/run/t7459b.scala new file mode 100644 index 0000000000..605890962c --- /dev/null +++ b/test/files/run/t7459b.scala @@ -0,0 +1,21 @@ +class LM { + class Node[B1] + + // crash + val g: (CC => Any) = { + case CC(tttt) => + new tttt.Node[Any]() + } + + val h: (Some[CC] => Any) = { + case Some(CC(tttt)) => + new tttt.Node[Any]() + } +} + +object Test extends App { + new LM().g(new CC(new LM())) + new LM().h(Some(new CC(new LM()))) +} +case class CC(n: LM) + diff --git a/test/files/run/t7459c.scala b/test/files/run/t7459c.scala new file mode 100644 index 0000000000..144c5d793b --- /dev/null +++ b/test/files/run/t7459c.scala @@ -0,0 +1,16 @@ +class LM { + class Node[B1] + + // crash + val g: (CC => Any) = { + case CC(tttt) => + tttt.## // no crash + new tttt.Node[Any]() + } +} + 
+object Test extends App { + new LM().g(new CC(new LM())) +} +case class CC(n: LM) + diff --git a/test/files/run/t7459d.scala b/test/files/run/t7459d.scala new file mode 100644 index 0000000000..3263701f9d --- /dev/null +++ b/test/files/run/t7459d.scala @@ -0,0 +1,15 @@ +class LM { + class Node[B1] + case class CC(n: LM) + + // crash + val f: (LM => Any) = { + case tttt => + val uuuu: (tttt.type, Any) = (tttt, 0) + new uuuu._1.Node[Any]() + } +} + +object Test extends App { + new LM().f(new LM()) +} diff --git a/test/files/run/t7459f.scala b/test/files/run/t7459f.scala new file mode 100644 index 0000000000..63e2109560 --- /dev/null +++ b/test/files/run/t7459f.scala @@ -0,0 +1,12 @@ +object Test extends App { + class C + + case class FooSeq(x: Int, y: String, z: C*) + + FooSeq(1, "a", new C()) match { + case FooSeq(1, "a", x@_* ) => + //println(x.toList) + x.asInstanceOf[x.type] + assert(x.isInstanceOf[x.type]) + } +} diff --git a/test/files/run/t7965.scala b/test/files/run/t7965.scala new file mode 100644 index 0000000000..df80d4b5bb --- /dev/null +++ b/test/files/run/t7965.scala @@ -0,0 +1,54 @@ +// Test that scala doesn't apply boxing or varargs conversions to the +// @PolymorphicSignature magical methods, MethodHandle#{invoke, invokeExact} +object Test { + val code = """ + +object O { + private def foo = "foo" + private def bar(x: Int): Int = -x + private def baz(x: Box): Unit = x.a = "present" + val lookup = java.lang.invoke.MethodHandles.lookup +} + +import java.lang.invoke._ +class Box(var a: Any) + +object Test { + def main(args: Array[String]): Unit = { + def lookup(name: String, params: Array[Class[_]], ret: Class[_]) = { + val mt = MethodType.methodType(ret, params) + O.lookup.findVirtual(O.getClass, name, mt) + } + val fooResult = (lookup("foo", Array(), classOf[String]).invokeExact(O): Int) + assert(fooResult == "foo") + + val barResult = (lookup("bar", Array(classOf[Int]), classOf[Int]).invokeExact(O, 42): Int) + assert(barResult == -42) + + val box = 
new Box(null) + (lookup("baz", Array(classOf[Box]), Void.TYPE).invokeExact(O, box) : Unit) + assert(box.a == "present") + + // Note: Application in statement position in a block in Java also infers return type of Unit, + // but we don't support that, ascribe the type to Unit as above. + // as done in Java. + // lookup("baz", Array(classOf[Box]), Void.TYPE).invokeExact(O, box) + () + } +} + +""" + def main(args: Array[String]): Unit = { + if (util.Properties.isJavaAtLeast("1.7")) test() + } + + def test() { + import scala.reflect.runtime._ + import scala.tools.reflect.ToolBox + + val m = currentMirror + val tb = m.mkToolBox() + import tb._ + eval(parse(code)) + } +} diff --git a/test/files/run/t9030.scala b/test/files/run/t9030.scala new file mode 100644 index 0000000000..48d24e5b54 --- /dev/null +++ b/test/files/run/t9030.scala @@ -0,0 +1,19 @@ +object Test extends App { + + // For these methods, the compiler emits calls to BoxesRuntime.equalsNumNum/equalsNumChar/equalsNumObject directly + + def numNum(a: java.lang.Number, b: java.lang.Number) = assert(a == b) + def numChar(a: java.lang.Number, b: java.lang.Character) = assert(a == b) + def numObject(a: java.lang.Number, b: java.lang.Object) = assert(a == b) + + // The compiler doesn't use equalsCharObject directly, but still adding an example for completeness + + def charObject(a: java.lang.Character, b: java.lang.Object) = assert(a == b) + + numNum(new Integer(1), new Integer(1)) + numChar(new Integer(97), new Character('a')) + numObject(new Integer(1), new Integer(1)) + numObject(new Integer(97), new Character('a')) + + charObject(new Character('a'), new Integer(97)) +} diff --git a/test/junit/scala/collection/mutable/ArrayBufferTest.scala b/test/junit/scala/collection/mutable/ArrayBufferTest.scala new file mode 100644 index 0000000000..8c83164027 --- /dev/null +++ b/test/junit/scala/collection/mutable/ArrayBufferTest.scala @@ -0,0 +1,36 @@ +package scala.collection.mutable + +import org.junit.runner.RunWith 
+import org.junit.runners.JUnit4 +import org.junit.{Assert, Test} + +import scala.tools.testing.AssertUtil + +/* Test for SI-9043 */ +@RunWith(classOf[JUnit4]) +class ArrayBufferTest { + @Test + def testInsertAll: Unit = { + val traver = ArrayBuffer(2, 4, 5, 7) + val testSeq = List(1, 3, 6, 9) + + def insertAt(x: Int) = { + val clone = traver.clone() + clone.insertAll(x, testSeq) + clone + } + + // Just insert some at position 0 + Assert.assertEquals(ArrayBuffer(1, 3, 6, 9, 2, 4, 5, 7), insertAt(0)) + + // Insert in the middle + Assert.assertEquals(ArrayBuffer(2, 4, 1, 3, 6, 9, 5, 7), insertAt(2)) + + // No strange last position weirdness + Assert.assertEquals(ArrayBuffer(2, 4, 5, 7, 1, 3, 6, 9), insertAt(traver.size)) + + // Overflow is caught + AssertUtil.assertThrows[IndexOutOfBoundsException] { insertAt(-1) } + AssertUtil.assertThrows[IndexOutOfBoundsException] { insertAt(traver.size + 10) } + } +} diff --git a/test/junit/scala/io/SourceTest.scala b/test/junit/scala/io/SourceTest.scala new file mode 100644 index 0000000000..3138a4589c --- /dev/null +++ b/test/junit/scala/io/SourceTest.scala @@ -0,0 +1,86 @@ + +package scala.io + +import org.junit.Test +import org.junit.Assert._ +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.AssertUtil._ + +import java.io.{ Console => _, _ } + +@RunWith(classOf[JUnit4]) +class SourceTest { + + private implicit val `our codec` = Codec.UTF8 + private val charSet = Codec.UTF8.charSet.name + + private def sampler = """ + |Big-endian and little-endian approaches aren't + |readily interchangeable in general, because the + |laws of arithmetic send signals leftward from + |the bits that are "least significant." 
+ |""".stripMargin.trim + + private def in = new ByteArrayInputStream(sampler.getBytes) + + @Test def canIterateLines() = { + assertEquals(sampler.lines.size, (Source fromString sampler).getLines.size) + } + @Test def canCustomizeReporting() = { + class CapitalReporting(is: InputStream) extends BufferedSource(is) { + override def report(pos: Int, msg: String, out: PrintStream): Unit = { + out print f"$pos%04x: ${msg.toUpperCase}" + } + class OffsetPositioner extends Positioner(null) { + override def next(): Char = { + ch = iter.next() + pos = pos + 1 + ch + } + } + withPositioning(new OffsetPositioner) + } + val s = new CapitalReporting(in) + // skip to next line and report an error + do { + val c = s.next() + } while (s.ch != '\n') + s.next() + val out = new ByteArrayOutputStream + val ps = new PrintStream(out, true, charSet) + s.reportError(s.pos, "That doesn't sound right.", ps) + assertEquals("0030: THAT DOESN'T SOUND RIGHT.", out.toString(charSet)) + } + @Test def canAltCustomizeReporting() = { + class CapitalReporting(is: InputStream)(implicit codec: Codec) extends Source { + override val iter = { + val r = new InputStreamReader(is, codec.decoder) + Iterator continually (codec wrap r.read()) takeWhile (_ != -1) map (_.toChar) + } + override def report(pos: Int, msg: String, out: PrintStream): Unit = { + out print f"$pos%04x: ${msg.toUpperCase}" + } + private[this] var _pos: Int = _ + override def pos = _pos + private[this] var _ch: Char = _ + override def ch = _ch + override def next = { + _ch = iter.next() + _pos += 1 + _ch + } + } + val s = new CapitalReporting(in) + // skip to next line and report an error + do { + val c = s.next() + } while (s.ch != '\n') + s.next() + val out = new ByteArrayOutputStream + val ps = new PrintStream(out, true, charSet) + s.reportError(s.pos, "That doesn't sound right.", ps) + assertEquals("0030: THAT DOESN'T SOUND RIGHT.", out.toString(charSet)) + } +} diff --git 
a/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala b/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala new file mode 100644 index 0000000000..1fff9c9a32 --- /dev/null +++ b/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala @@ -0,0 +1,555 @@ +package scala.tools.nsc.transform.patmat + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.collection.mutable +import scala.tools.nsc.{Global, Settings} + +object TestSolver extends Logic with Solving { + + val global: Global = new Global(new Settings()) + + // disable max recursion depth in order to get all solutions + global.settings.YpatmatExhaustdepth.tryToSet("off" :: Nil) + + object TestSolver extends Solver { + + class Const { + override def toString: String = "Const" + } + + val NullConst = new Const + type Type = Int + + case class TypeConst(i: Int) extends Const + + object TypeConst extends TypeConstExtractor + + case class ValueConst(i: Int) extends Const + + object ValueConst extends ValueConstExtractor { + def apply(t: Tree): Const = ??? + } + + case class Tree(name: String) + + class Var(val x: Tree) extends AbsVar { + + override def equals(other: scala.Any): Boolean = other match { + case that: Var => this.x == that.x + case _ => false + } + + override def hashCode(): Int = x.hashCode() + + override def toString: String = { + s"Var($x)" + } + + def domainSyms = None + + def implications = Nil + + def mayBeNull = false + + def propForEqualsTo(c: Const): Prop = ??? 
      // No-op stubs: constraint registration is irrelevant here — these tests
      // exercise only CNF conversion and model enumeration, not the full
      // pattern-match analysis. (NOTE(review): stub behavior inferred from the
      // empty bodies below, not from the Logic/Solving contract — confirm.)
      def registerEquality(c: Const) = ()

      def registerNull() = ()

      def symForStaticTp = None
    }

    object Var extends VarExtractor {
      def apply(x: Tree): Var = new Var(x)

      def unapply(v: Var): Some[Tree] = Some(v.x)
    }

    def prepareNewAnalysis() = {}

    // Escalate analysis warnings to hard errors so a test cannot pass silently.
    def reportWarning(msg: String) = sys.error(msg)

    /**
     * The DPLL procedure only returns a minimal mapping from literal to value
     * such that the CNF formula is satisfied.
     * E.g. for:
     * `(a \/ b)`
     * The DPLL procedure will find either {a = true} or {b = true}
     * as solution.
     *
     * The expansion step will amend both solutions with the unassigned variable
     * i.e., {a = true} will be expanded to {a = true, b = true} and
     * {a = true, b = false}.
     */
    def expandUnassigned(solution: Solution): List[Model] = {
      import solution._

      // the number of solutions is doubled for every unassigned variable
      val expandedModels = 1 << unassigned.size
      var current = mutable.ArrayBuffer[Model]()
      var next = mutable.ArrayBuffer[Model]()
      current.sizeHint(expandedModels)
      next.sizeHint(expandedModels)

      current += model

      // we use double buffering:
      // for each model read from `current`, append two expanded models to
      // `next` (one per polarity of the unassigned symbol `s`), then swap
      // the two buffers
      for {
        s <- unassigned
      } {
        for {
          model <- current
        } {
          def force(s: Sym, pol: Boolean) = model + (s -> pol)

          next += force(s, pol = true)
          next += force(s, pol = false)
        }

        val tmp = current
        current = next
        next = tmp

        next.clear()
      }

      current.toList
    }

    /**
     * Old CNF conversion code, used for reference:
     * - convert formula into NNF
     * (i.e., no negated terms, only negated variables)
     * - use distributive laws to convert into CNF
     */
    def eqFreePropToSolvableViaDistribution(p: Prop) = {
      val symbolMapping = new SymbolMapping(gatherSymbols(p))

      // a formula is a conjunction of clauses
      type Formula = Array[TestSolver.Clause]

      def formula(c: Clause*): Formula = c.toArray

      def merge(a: Clause, b: Clause) = a ++ b

      // NNF of `Not(p)`: push the negation inwards via De Morgan
      def negationNormalFormNot(p: Prop): Prop = p match {
        case And(ps) => Or(ps map negationNormalFormNot)
        case Or(ps) => And(ps map negationNormalFormNot)
        case Not(p) => negationNormalForm(p)
        case True => False
        case False => True
        case s: Sym => Not(s)
      }

      def negationNormalForm(p: Prop): Prop = p match {
        case Or(ps) => Or(ps map negationNormalForm)
        case And(ps) => And(ps map negationNormalForm)
        case Not(negated) => negationNormalFormNot(negated)
        case True
           | False
           | (_: Sym) => p
      }

      // the empty conjunction is trivially true; a conjunction that contains
      // the empty clause is unsatisfiable
      val TrueF: Formula = Array()
      val FalseF = Array(clause())
      def lit(sym: Sym) = Array(clause(symbolMapping.lit(sym)))
      def negLit(sym: Sym) = Array(clause(-symbolMapping.lit(sym)))

      def conjunctiveNormalForm(p: Prop): Formula = {
        def distribute(a: Formula, b: Formula): Formula =
          (a, b) match {
            // true \/ _ = true
            // _ \/ true = true
            case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
            // lit \/ lit
            case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
            // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
            // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
            case (cs, ds) =>
              // distribute the smaller formula into each clause of the bigger one
              val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
              big flatMap (c => distribute(formula(c), small))
          }

        p match {
          case True => TrueF
          case False => FalseF
          case s: Sym => lit(s)
          case Not(s: Sym) => negLit(s)
          case And(ps) =>
            ps.toArray flatMap conjunctiveNormalForm
          case Or(ps) =>
            ps map conjunctiveNormalForm reduceLeft { (a, b) =>
              distribute(a, b)
            }
        }
      }
      val cnf = conjunctiveNormalForm(negationNormalForm(p))
      Solvable(cnf, symbolMapping)
    }

  }

}

/**
 * Testing CNF conversion via Tseitin vs NNF & expansion.
+ */ +@RunWith(classOf[JUnit4]) +class SolvingTest { + + import scala.tools.nsc.transform.patmat.TestSolver.TestSolver._ + + implicit val Ord: Ordering[TestSolver.TestSolver.Model] = Ordering.by { + _.toSeq.sortBy(_.toString()).toIterable + } + + private def sym(name: String) = Sym(Var(Tree(name)), NullConst) + + @Test + def testSymCreation() { + val s1 = sym("hello") + val s2 = sym("hello") + assertEquals(s1, s2) + } + + /** + * Simplest possible test: solve a formula and check the solution(s) + */ + @Test + def testUnassigned() { + val pSym = sym("p") + val solvable = propToSolvable(Or(pSym, Not(pSym))) + val solutions = TestSolver.TestSolver.findAllModelsFor(solvable) + val expected = List(Solution(Map(), List(pSym))) + assertEquals(expected, solutions) + } + + /** + * Unassigned variables must be expanded + * for stable results + */ + @Test + def testNoUnassigned() { + val pSym = sym("p") + val qSym = sym("q") + val solvable = propToSolvable(Or(pSym, Not(qSym))) + val solutions = findAllModelsFor(solvable) + val expanded = solutions.flatMap(expandUnassigned).sorted + val expected = Seq( + Map(pSym -> false, qSym -> false), + Map(pSym -> true, qSym -> false), + Map(pSym -> true, qSym -> true) + ).sorted + + assertEquals(expected, expanded) + } + + @Test + def testTseitinVsExpansionFrom_t7020() { + val formulas = Seq( + And(And(And(Not(sym("V1=null")), + sym("V1=scala.collection.immutable.::[?]")), And(Not(sym("V1=null")), + And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), sym("V3=Nil")))), + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(Or(Or(sym("V1=null"), + 
Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))))), + + And(And(And(Not(sym("V1=null")), + sym("V1=scala.collection.immutable.::[?]")), And(Not(sym("V1=null")), + And(sym("V2=7"), sym("V3=Nil")))), + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil")))))))), + + And(And(Not(sym("V1=null")), + sym("V1=scala.collection.immutable.::[?]")), And(Not(sym("V1=null")), + And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), sym("V3=Nil")))), + + And(And(Not(sym("V1=null")), sym("V1=scala.collection.immutable.::[?]")), + And(Not(sym("V1=null")), And(sym("V2=7"), sym("V3=Nil")))), + + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), 
Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))))), + + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil"))))))), + + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(sym("V1=Nil"), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), 
And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil"))))))))), + + And(And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil"))))), + + And(And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil")))))), + + And(And(Or(sym("V3=scala.collection.immutable.::[?]"), sym("V3=Nil")), + Or(sym("V1=scala.collection.immutable.::[?]"), sym("V1=Nil"))), + And(And(Or(Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), + Or(False, Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + 
Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil")))))), + + And(Not(sym("V1=null")), And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), + sym("V3=Nil"))), + + And(Not(sym("V1=null")), And(sym("V2=7"), sym("V3=Nil"))), + + And(Not(sym("V1=null")), sym("V1=scala.collection.immutable.::[?]")), + + And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + + And(Not(sym("V2=5")), Not(sym("V2=6"))), + + And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))), + + And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), + + And(Or(Not(sym("V3=Nil")), Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))), + + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null"))))), + + And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil")))), + + And(Or(Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(False, + 
Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), + + And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil"))), + + And(Or(Or(sym("V1=null"), Not(sym("V1=scala.collection.immutable.::[?]"))), + Or(sym("V1=null"), Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), + Not(sym("V2=6")))), Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil"))))))), + + And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), + + And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(sym("V1=Nil"), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + 
Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))))))), + + And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), sym("V3=Nil")), + + And(Or(sym("V3=scala.collection.immutable.::[?]"), Or(sym("V3=Nil"), + sym("V3=null"))), And(Or(Not(sym("V3=Nil")), Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null"))))))), + + And(Or(sym("V3=scala.collection.immutable.::[?]"), + sym("V3=Nil")), Or(sym("V1=scala.collection.immutable.::[?]"), + sym("V1=Nil"))), + + And(sym("V1=Nil"), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))))), + + And(sym("V2=7"), sym("V3=Nil")), + + False, + + Not(sym("V1=Nil")), + + Or(And(Not(sym("V2=4")), + And(Not(sym("V2=5")), Not(sym("V2=6")))), Not(sym("V3=Nil"))), + + Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), + + Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil")))), + + Or(False, Or(Not(sym("V2=1")), Not(sym("V3=Nil")))), + + Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + + Or(Not(sym("V3=scala.collection.immutable.::[?]")), Not(sym("V3=null"))), + + Or(Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), + + Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=1")), 
Not(sym("V3=Nil"))))), + + Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), + + Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), + + Or(sym("V1=null"), Not(sym("V1=scala.collection.immutable.::[?]"))), + + Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil")))), + + Or(sym("V1=null"), Or(Not(sym("V2=1")), Not(sym("V3=Nil")))), + + Or(sym("V1=scala.collection.immutable.::[?]"), + Or(sym("V1=Nil"), sym("V1=null"))), + + Or(sym("V1=scala.collection.immutable.::[?]"), sym("V1=Nil")), + + Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), + + sym("V3=scala.collection.immutable.::[?]") + ) + + formulas foreach { + f => + // build CNF + val tseitinCnf = propToSolvable(f) + val expansionCnf = eqFreePropToSolvableViaDistribution(f) + + // ALL-SAT + val tseitinSolutions = findAllModelsFor(tseitinCnf) + val expansionSolutins = findAllModelsFor(expansionCnf) + + // expand unassigned variables + // (otherwise solutions can not be compared) + val tseitinNoUnassigned = tseitinSolutions.flatMap(expandUnassigned).sorted + val expansionNoUnassigned = expansionSolutins.flatMap(expandUnassigned).sorted + assertEquals(tseitinNoUnassigned, expansionNoUnassigned) + } + } +} + + diff --git a/versions.properties b/versions.properties index d24a3bb952..a474b19c5b 100644 --- a/versions.properties +++ b/versions.properties @@ -18,7 +18,7 @@ scala.full.version=2.11.2 # external modules shipped with distribution, as specified by scala-library-all's pom scala-xml.version.number=1.0.3 -scala-parser-combinators.version.number=1.0.2 +scala-parser-combinators.version.number=1.0.3 scala-continuations-plugin.version.number=1.0.2 scala-continuations-library.version.number=1.0.2 scala-swing.version.number=1.0.1 |