diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml new file mode 100644 index 0000000000..9ccda128e2 --- /dev/null +++ b/.github/workflows/build-docs.yml @@ -0,0 +1,16 @@ +name: Build docs +on: + pull_request: + push: + branches: + - main +jobs: + build-docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: ammaraskar/sphinx-action@0.4 + with: + pre-build-command: "pip install Sphinx==7.0.1 recommonmark==0.7.1" + docs-folder: "docs/" + build-command: 'sphinx-build -b html . _build/html -W --keep-going -n' diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 1c8ad95a47..53646aa3f6 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -24,7 +24,7 @@ jobs: - uses: actions/checkout@v3 - uses: ./.github/actions/linux-setup-env with: - scala-version: "2.13.10" #Unused, any version can be placed here + scala-version: "2.13.11" #Unused, any version can be placed here java-version: 8 - name: Setup PGP Key diff --git a/.github/workflows/publishScala3CompilerPlugin.yml b/.github/workflows/publishScala3CompilerPlugin.yml new file mode 100644 index 0000000000..cefe59b1e4 --- /dev/null +++ b/.github/workflows/publishScala3CompilerPlugin.yml @@ -0,0 +1,46 @@ +# Flow dedicated to publishing compiler plugins for Scala 3 RC versions +name: Publish Scala 3 compiler plugin +on: + workflow_dispatch: + inputs: + scala-version: + type: string + description: "Version of Scala 3 compiler for which plugin should be published" + default: "" + scala-native-version: + type: string + description: "Tag of Scala Native release which should be used for release" + +jobs: + publish: + name: Publish + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.scala-native-version }} + - uses: ./.github/actions/linux-setup-env + with: + scala-version: ${{ inputs.scala-version }} + java-version: 8 + + - name: Setup PGP Key + run: | + echo -n "$PGP_SECRET" | base64 
--decode | gpg --batch --import + env: + PGP_SECRET: ${{ secrets.PGP_SECRET }} + + - name: Publish release + env: + MAVEN_USER: "${{ secrets.SONATYPE_USER }}" + MAVEN_PASSWORD: "${{ secrets.SONATYPE_PASSWORD }}" + PGP_PASSPHRASE: "${{ secrets.PGP_PASSWORD }}" + run: > + sbt ' + set crossScalaVersions += "${{ inputs.scala-version }}"; + set nscPlugin.forBinaryVersion("3")/crossScalaVersions += "${{ inputs.scala-version }}"; + set junitPlugin.forBinaryVersion("3")/crossScalaVersions += "${{ inputs.scala-version }}"; + ++${{ inputs.scala-version }} -v; + nscplugin3/publishSigned; + junitPlugin3/publishSigned + ' diff --git a/.github/workflows/run-jdk-compliance-tests.yml b/.github/workflows/run-jdk-compliance-tests.yml index 34fae979a4..b5f7f425c9 100644 --- a/.github/workflows/run-jdk-compliance-tests.yml +++ b/.github/workflows/run-jdk-compliance-tests.yml @@ -7,7 +7,7 @@ on: - main - 0.4.x concurrency: - group: jdk-compliance-${{ github.head_ref }} + group: jdk-compliance-${{ github.head_ref }}-${{ github.event_name }} cancel-in-progress: true jobs: @@ -20,7 +20,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-20.04, macos-11] - scala: [3.2.2] + scala: [3.3.0] java: [11, 17] steps: - uses: actions/checkout@v3 @@ -44,7 +44,7 @@ jobs: strategy: fail-fast: false matrix: - scala: [3.2.2] + scala: [3.3.0] java: [11, 17] steps: # Disable autocrlf setting, otherwise scalalib patches might not be possible to apply diff --git a/.github/workflows/run-tests-linux-multiarch.yml b/.github/workflows/run-tests-linux-multiarch.yml index 665081da7e..4004358827 100644 --- a/.github/workflows/run-tests-linux-multiarch.yml +++ b/.github/workflows/run-tests-linux-multiarch.yml @@ -79,7 +79,7 @@ jobs: fail-fast: false matrix: arch: [linux-arm64] - scala: [2.13.10, 3.2.2] + scala: [2.13.10, 3.3.0] build-mode: [debug, release-fast] lto: [none, thin] gc: [boehm, immix, commix] diff --git a/.github/workflows/run-tests-linux.yml b/.github/workflows/run-tests-linux.yml index 
f5357f5b64..7ba514763f 100644 --- a/.github/workflows/run-tests-linux.yml +++ b/.github/workflows/run-tests-linux.yml @@ -5,8 +5,9 @@ on: push: branches: - main + - 0.4.x concurrency: - group: linux-${{ github.head_ref }} + group: linux-${{ github.head_ref }}-${{ github.event_name }} cancel-in-progress: true jobs: @@ -19,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - scala: [3.2.2, 2.13.10, 2.12.17] + scala: [3.3.0, 2.13.11, 2.12.18] steps: - uses: actions/checkout@v3 - uses: ./.github/actions/linux-setup-env @@ -27,7 +28,7 @@ jobs: scala-version: ${{matrix.scala}} - name: Test tools - run: sbt "++ ${{ matrix.scala }} -v" "-no-colors" "-J-Xmx3G" "test-tools ${{ matrix.scala }}" + run: sbt "++ ${{ matrix.scala }} -v" "test-tools ${{ matrix.scala }}; toolsBenchmarks${{env.project-version}}/Jmh/compile" # Make sure that Scala partest blacklisted tests contain only valid test names - name: Setup Scala-cli @@ -38,7 +39,6 @@ jobs: if: ${{ !startsWith(matrix.scala, '3.') }} run: | sbt "++ ${{ matrix.scala }} -v" \ - "-no-colors" \ "scalaPartest${{env.project-version}}/fetchScalaSource" scala-cli scripts/partest-check-files.scala -- ${{ matrix.scala }} @@ -53,40 +53,73 @@ jobs: "-J-Xmx3G" \ "scalaPartestTests${{env.project-version}}/testOnly -- --showDiff neg/abstract.scala pos/abstract.scala run/Course-2002-01.scala" - #Main tests grid. 
Builds and runs tests agains multiple combination of GC, Build mode and Scala Version - #It can be extended to test against different OS and Arch settings + test-compiler-plugins: + runs-on: ubuntu-20.04 + strategy: + fail-fast: false + matrix: + scala: [3.3.0, 2.13.11, 2.12.18] + if: "(github.event_name == 'schedule' || github.event_name == 'workflow_call') && github.repository == 'scala-native/scala-native'" + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/linux-setup-env + with: + scala-version: ${{matrix.scala}} + java-version: 8 + + - name: Test cross compilation of compiler plugins + run: sbt "+nscplugin${{env.project-version}}/compile; +junitPlugin${{env.project-version}}/compile" "-J-Xmx3G" + test-runtime: name: Test runtime + runs-on: ubuntu-22.04 + needs: tests-tools + strategy: + fail-fast: false + matrix: + scala: [3.3.0] + gc: [none, boehm, immix, commix] + include: + - scala: 2.13.11 + gc: immix + - scala: 2.12.18 + gc: immix + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/linux-setup-env + with: + scala-version: ${{matrix.scala}} + - name: Run tests + env: + SCALANATIVE_GC: ${{ matrix.gc }} + run: sbt "test-runtime ${{ matrix.scala }}" + + test-runtime-ext: + name: Test runtime extension runs-on: ubuntu-20.04 needs: tests-tools strategy: fail-fast: false matrix: - scala: [3.2.2, 2.13.10] + scala: [3.3.0, 2.13.11] build-mode: [debug, release-fast] gc: [boehm, immix, commix] # Create holes in grid to lower number of tests. 
# Excluded entries should have low impact on overall project coverage exclude: - - scala: 2.13.10 + # Covered in basic test-runtime + - scala: 3.3.0 build-mode: debug - gc: immix - include: - - scala: 2.12.17 + - scala: 2.13.11 build-mode: debug gc: immix - - scala: 2.12.17 - build-mode: release-fast - gc: commix - - scala: 2.12.17 + include: + - scala: 3.3.0 build-mode: release-size gc: commix - scala: 3.1.3 build-mode: debug gc: immix - - scala: 3.1.3 - build-mode: release-fast - gc: commix - scala: 3.1.3 build-mode: release-size gc: commix @@ -112,10 +145,10 @@ jobs: strategy: fail-fast: false matrix: - scala: [3.2.2, 2.13.10] + scala: [3.3.0, 2.13.11] build-mode: [debug] include: - - scala: 2.13.10 + - scala: 2.13.11 build-mode: release-fast steps: - uses: actions/checkout@v3 @@ -137,7 +170,7 @@ jobs: strategy: fail-fast: false matrix: - scala: [3.2.2, 2.13.10] + scala: [3.3.0, 2.13.10] lto: [thin] optimize: [true] include: @@ -145,7 +178,7 @@ jobs: - scala: 2.13.10 lto: full optimize: true - - scala: 3.2.2 + - scala: 3.3.0 lto: full optimize: false @@ -171,7 +204,9 @@ jobs: strategy: fail-fast: false matrix: - scala: [2.12.17, 3.1.3] + scala: [3.3.0] + llvm: [13, 14, 15, 16, 17] # Last 3 stable versions + available future versions + if: "(github.event_name == 'schedule' || github.event_name == 'workflow_call') && github.repository == 'scala-native/scala-native'" steps: - uses: actions/checkout@v3 - uses: ./.github/actions/linux-setup-env @@ -181,8 +216,7 @@ jobs: - name: Run tests env: SCALANATIVE_MODE: release-fast - SCALANATIVE_GC: immix - SCALANATIVE_OPTIMIZE: true - run: | - export LLVM_BIN=$(dirname $(readlink -f /usr/bin/clang)) - sbt "test-scripted ${{matrix.scala}}" + SCALANATIVE_LTO: thin + LLVM_BIN: "/usr/lib/llvm-${{ matrix.llvm }}/bin" + run: sbt "test-runtime ${{ matrix.scala }}" + diff --git a/.github/workflows/run-tests-macos.yml b/.github/workflows/run-tests-macos.yml index 5b2c4993c7..4c4e0c5513 100644 --- 
a/.github/workflows/run-tests-macos.yml +++ b/.github/workflows/run-tests-macos.yml @@ -17,17 +17,15 @@ jobs: strategy: fail-fast: false matrix: - scala: [3.2.2, 2.13.10, 2.12.17] - gc: [immix] + scala: [3.3.0] + gc: [none, boehm, immix, commix] include: - scala: 2.13.10 gc: commix - scala: 2.13.10 gc: none - - scala: 2.12.17 + - scala: 2.12.18 gc: boehm - - scala: 3.1.3 - gc: immix steps: - uses: actions/checkout@v3 @@ -50,13 +48,90 @@ jobs: sbt ++${{matrix.scala}} "scalaPartestTests${{env.project-version}}/testOnly -- --showDiff neg/abstract.scala pos/abstract.scala run/Course-2002-01.scala" + test-runtime-ext: + name: Test runtime extension + runs-on: macos-11 + needs: [test-runtime] + if: "(github.event_name == 'schedule' || github.event_name == 'workflow_call') && github.repository == 'scala-native/scala-native'" + strategy: + fail-fast: false + matrix: + scala: [3.3.0, 2.13.11] + build-mode: [debug, release-fast] + lto: [none, thin] + gc: [boehm, immix, commix] + exclude: + # Covered in basic test-runtime + - scala: 3.3.0 + build-mode: debug + - scala: 2.13.11 + build-mode: debug + gc: immix + include: + - scala: 3.3.0 + build-mode: release-size + lto: thin + gc: immix + - scala: 3.3.0 + build-mode: release-full + lto: thin + gc: commix + - scala: 2.13.11 + build-mode: release-full + lto: full + gc: commix + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/macos-setup-env + with: + scala-version: ${{matrix.scala}} + gc: ${{ matrix.gc }} + + - name: Prepare setConfig command + # Linking on MacOS in GithubActions fails when using default linker (ld), use lld instead + run: | + SetConfigTemplate=$(cat << EOM + nativeConfig ~= { prev => + prev + .withLinkingOptions(prev.linkingOptions ++ Seq("-fuse-ld=lld") ) + } + EOM + ) + echo set-native-config=${SetConfigTemplate} >> $GITHUB_ENV + + - name: Run tests + env: + SCALANATIVE_MODE: ${{ matrix.build-mode }} + SCALANATIVE_GC: ${{ matrix.gc }} + SCALANATIVE_LTO: ${{ matrix.lto }} + 
SCALANATIVE_OPTIMIZE: true + TEST_COMMAND: > + ++${{matrix.scala}} -v; + set sandbox.forBinaryVersion("${{env.binary-version}}")/${{env.set-native-config}}; + set tests.forBinaryVersion("${{env.binary-version}}")/${{env.set-native-config}}; + set junitTestOutputsNative.forBinaryVersion("${{env.binary-version}}")/${{env.set-native-config}}; + set scalaPartestJunitTests.forBinaryVersion("${{env.binary-version}}")/${{env.set-native-config}}; + + show tests${{env.project-version}}/nativeConfig; + + sandbox${{env.project-version}}/run; + testsJVM${{env.project-version}}/test; + tests${{env.project-version}}/test; + junitTestOutputsJVM${{env.project-version}}/test; + junitTestOutputsNative${{env.project-version}}/test; + scalaPartestJunitTests${{env.project-version}}/test + run: | + export LLVM_BIN="$(brew --prefix llvm@15)/bin" + $LLVM_BIN/clang --version + sbt -J-Xmx5G "${TEST_COMMAND}" + run-scripted-tests: name: Scripted tests runs-on: macos-11 strategy: fail-fast: false matrix: - scala: [2.12.17, 3.1.3] + scala: [2.12.18, 3.1.3] steps: - uses: actions/checkout@v3 - uses: ./.github/actions/macos-setup-env diff --git a/.github/workflows/run-tests-windows.yml b/.github/workflows/run-tests-windows.yml index 179b3ea894..c283e54491 100644 --- a/.github/workflows/run-tests-windows.yml +++ b/.github/workflows/run-tests-windows.yml @@ -17,17 +17,13 @@ jobs: strategy: fail-fast: false matrix: - scala: [3.2.2, 2.13.10] - gc: [boehm, immix, commix] + scala: [3.3.0, 2.13.10] + gc: [none, boehm, immix, commix] include: - - scala: 2.12.17 + - scala: 2.12.18 gc: immix - - scala: 2.12.17 - gc: commix - scala: 2.13.10 - gc: none - - scala: 3.1.3 - gc: immix + gc: boehm steps: # Disable autocrlf setting, otherwise scalalib patches might not be possible to apply - name: Setup git config @@ -74,7 +70,7 @@ jobs: strategy: fail-fast: false matrix: - scala: [2.12.17, 3.1.3] + scala: [2.12.18, 3.1.3] steps: # Disable autocrlf setting, otherwise scalalib patches might not be possible to 
apply - name: Setup git config @@ -94,15 +90,30 @@ jobs: strategy: fail-fast: false matrix: - scala: [3.2.2, 2.13.10] - build-mode: [release-fast] - lto: [thin] - optimize: [true] + scala: [3.3.0, 2.13.11] + build-mode: [debug, release-fast] + lto: [none, thin] + gc: [boehm, immix, commix] + exclude: + # Covered in basic test-runtime + - scala: 3.3.0 + build-mode: debug + - scala: 2.13.11 + build-mode: debug + gc: immix + - build-mode: release-fast + lto: none include: - - scala: 3.2.2 - lto: full - optimize: true - - scala: 2.13.10 + - scala: 3.3.0 + build-mode: release-size + lto: thin + gc: immix + - scala: 3.3.0 + build-mode: release-full + lto: thin + gc: commix + - scala: 2.13.11 + build-mode: release-full lto: full optimize: true - scala: 2.12.17 @@ -138,7 +149,7 @@ jobs: strategy: fail-fast: false matrix: - scala: [3.2.2] + scala: [3.3.0] llvm: ["13.0.1", "14.0.6", "15.0.7"] # Last 3 stable versions steps: - name: Setup git config diff --git a/LICENSE.md b/LICENSE.md index f2f2c498ec..15104927e0 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1847,3 +1847,7 @@ The original license notice is included below: Additions by this project to the original `epollcat` implementation carry the Scala Native license. + +# License notice for LLVM + +Scala Native's `tools/` contains parts that are derived from the [LLVM Project](https://llvm.org/). Those parts are either marked with `// ported from LLVM` and/or include the full copyright preamble in the source code file. The original code was licensed under Apache License Version v2.0 with LLVM Exceptions. 
diff --git a/auxlib/src/main/scala/scala/runtime/BoxesRunTime.scala b/auxlib/src/main/scala/scala/runtime/BoxesRunTime.scala index 7c3314722b..00eeef577b 100644 --- a/auxlib/src/main/scala/scala/runtime/BoxesRunTime.scala +++ b/auxlib/src/main/scala/scala/runtime/BoxesRunTime.scala @@ -1,45 +1,734 @@ package scala.runtime import scala.math.ScalaNumber -import scala.scalanative.unsafe._ +import scala.annotation.{nowarn, switch} class BoxesRunTime +/** An object (static class) that defines methods used for creating, reverting, + * and calculating with, boxed values. There are four classes of methods in + * this object: + * - Convenience boxing methods which call the static valueOf method on the + * boxed class, thus utilizing the JVM boxing cache. + * - Convenience unboxing methods returning default value on null. + * - The generalised comparison method to be used when an object may be a + * boxed value. + * - Standard value operators for boxed java.lang.Number and + * quasi-java.lang.Number values. + */ object BoxesRunTime { - def boxToBoolean(v: scala.Boolean): java.lang.Boolean = + private final val CHAR = 0 + private final val BYTE = 1 + private final val SHORT = 2 + private final val INT = 3 + private final val LONG = 4 + private final val FLOAT = 5 + private final val DOUBLE = 6 + private final val OTHER = 7 + + /** We don't need to return BYTE and SHORT, as everything which might care + * widens to INT. 
+ */ + private def typeCode(a: java.lang.Object): scala.Int = a match { + case _: java.lang.Integer => INT + case _: java.lang.Double => DOUBLE + case _: java.lang.Long => LONG + case _: java.lang.Character => CHAR + case _: java.lang.Float => FLOAT + case _: java.lang.Byte => INT + case _: java.lang.Short => INT + case _ => OTHER + } + + // Boxing + @inline def boxToBoolean(v: scala.Boolean): java.lang.Boolean = java.lang.Boolean.valueOf(v) - def boxToCharacter(v: scala.Char): java.lang.Character = + @inline def boxToCharacter(v: scala.Char): java.lang.Character = java.lang.Character.valueOf(v) - def boxToByte(v: scala.Byte): java.lang.Byte = + @inline def boxToByte(v: scala.Byte): java.lang.Byte = java.lang.Byte.valueOf(v) - def boxToShort(v: scala.Short): java.lang.Short = + @inline def boxToShort(v: scala.Short): java.lang.Short = java.lang.Short.valueOf(v) - def boxToInteger(v: scala.Int): java.lang.Integer = + @inline def boxToInteger(v: scala.Int): java.lang.Integer = java.lang.Integer.valueOf(v) - def boxToLong(v: scala.Long): java.lang.Long = + @inline def boxToLong(v: scala.Long): java.lang.Long = java.lang.Long.valueOf(v) - def boxToFloat(v: scala.Float): java.lang.Float = + @inline def boxToFloat(v: scala.Float): java.lang.Float = java.lang.Float.valueOf(v) - def boxToDouble(v: scala.Double): java.lang.Double = + @inline def boxToDouble(v: scala.Double): java.lang.Double = java.lang.Double.valueOf(v) - def unboxToBoolean(o: java.lang.Object): scala.Boolean = + // Unboxing + @inline def unboxToBoolean(o: java.lang.Object): scala.Boolean = if (o == null) false else o.asInstanceOf[java.lang.Boolean].booleanValue - def unboxToChar(o: java.lang.Object): scala.Char = + @inline def unboxToChar(o: java.lang.Object): scala.Char = if (o == null) 0 else o.asInstanceOf[java.lang.Character].charValue - def unboxToByte(o: java.lang.Object): scala.Byte = + @inline def unboxToByte(o: java.lang.Object): scala.Byte = if (o == null) 0 else 
o.asInstanceOf[java.lang.Byte].byteValue - def unboxToShort(o: java.lang.Object): scala.Short = + @inline def unboxToShort(o: java.lang.Object): scala.Short = if (o == null) 0 else o.asInstanceOf[java.lang.Short].shortValue - def unboxToInt(o: java.lang.Object): scala.Int = + @inline def unboxToInt(o: java.lang.Object): scala.Int = if (o == null) 0 else o.asInstanceOf[java.lang.Integer].intValue - def unboxToLong(o: java.lang.Object): scala.Long = + @inline def unboxToLong(o: java.lang.Object): scala.Long = if (o == null) 0 else o.asInstanceOf[java.lang.Long].longValue - def unboxToFloat(o: java.lang.Object): scala.Float = + @inline def unboxToFloat(o: java.lang.Object): scala.Float = if (o == null) 0 else o.asInstanceOf[java.lang.Float].floatValue - def unboxToDouble(o: java.lang.Object): scala.Double = + @inline def unboxToDouble(o: java.lang.Object): scala.Double = if (o == null) 0 else o.asInstanceOf[java.lang.Double].doubleValue + // Comparison + @inline def equals(x: java.lang.Object, y: java.lang.Object): Boolean = { + if (x eq y) true + else equals2(x, y) + } + + def equals2(x: java.lang.Object, y: java.lang.Object): Boolean = x match { + case x: java.lang.Number => equalsNumObject(x, y) + case x: java.lang.Character => equalsCharObject(x, y) + case null => y == null + case x => x.equals(y) + } + + def equalsNumObject(xn: java.lang.Number, y: java.lang.Object): Boolean = + y match { + case y: java.lang.Number => equalsNumNum(xn, y) + case y: java.lang.Character => equalsNumChar(xn, y) + case null => xn == null + case y => xn.equals(y) + } + + def equalsNumNum(xn: java.lang.Number, yn: java.lang.Number): Boolean = { + if (xn == null) yn == null + else { + val xcode = typeCode(xn) + val ycode = typeCode(yn) + val maxcode = if (xcode > ycode) xcode else ycode + (maxcode: @switch) match { + case INT => xn.intValue() == yn.intValue() + case LONG => xn.longValue() == yn.longValue() + case FLOAT => xn.floatValue() == yn.floatValue() + case DOUBLE => 
xn.doubleValue() == yn.doubleValue() + case _ => + if (yn.isInstanceOf[ScalaNumber] && !xn.isInstanceOf[ScalaNumber]) + yn.equals(xn) + else xn.equals(yn) + } + } + } + + def equalsCharObject(xc: java.lang.Character, y: java.lang.Object): Boolean = + y match { + case y: java.lang.Character => xc.charValue() == y.charValue() + case y: java.lang.Number => equalsNumChar(y, xc) + case null => xc == null + case _ => xc.equals(y) + } + + def equalsNumChar(xn: java.lang.Number, yc: java.lang.Character): Boolean = { + if (yc == null) xn == null + else { + val ch = yc.charValue() + (typeCode(xn): @switch) match { + case INT => xn.intValue() == ch + case LONG => xn.longValue() == ch + case FLOAT => xn.floatValue() == ch + case DOUBLE => xn.doubleValue() == ch + case _ => xn.equals(yc): @nowarn + } + } + } + + private def unboxCharOrInt( + arg1: java.lang.Object, + code: scala.Int + ): scala.Int = { + if (code == CHAR) + arg1.asInstanceOf[java.lang.Character].charValue() + else + arg1.asInstanceOf[java.lang.Number].intValue() + } + + private def unboxCharOrLong(arg1: java.lang.Object, code: scala.Int): Long = { + if (code == CHAR) + arg1.asInstanceOf[java.lang.Character].charValue() + else + arg1.asInstanceOf[java.lang.Number].longValue() + } + + private def unboxCharOrFloat( + arg1: java.lang.Object, + code: scala.Int + ): Float = { + if (code == CHAR) + arg1.asInstanceOf[java.lang.Character].charValue() + else + arg1.asInstanceOf[java.lang.Number].floatValue() + } + + private def unboxCharOrDouble( + arg1: java.lang.Object, + code: scala.Int + ): Double = { + if (code == CHAR) + arg1.asInstanceOf[java.lang.Character].charValue() + else + arg1.asInstanceOf[java.lang.Number].doubleValue() + } + + // Operators + def add(arg1: java.lang.Object, arg2: java.lang.Object): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + if (maxcode <= INT) + boxToInteger(unboxCharOrInt(arg1, code1) + 
unboxCharOrInt(arg2, code2)) + else if (maxcode <= LONG) + boxToLong(unboxCharOrLong(arg1, code1) + unboxCharOrLong(arg2, code2)) + else if (maxcode <= FLOAT) + boxToFloat(unboxCharOrFloat(arg1, code1) + unboxCharOrFloat(arg2, code2)) + else if (maxcode <= DOUBLE) + boxToDouble( + unboxCharOrDouble(arg1, code1) + unboxCharOrDouble(arg2, code2) + ) + else + throw new NoSuchMethodException() + } + + def subtract( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + if (maxcode <= INT) + boxToInteger(unboxCharOrInt(arg1, code1) - unboxCharOrInt(arg2, code2)) + else if (maxcode <= LONG) + boxToLong(unboxCharOrLong(arg1, code1) - unboxCharOrLong(arg2, code2)) + else if (maxcode <= FLOAT) + boxToFloat(unboxCharOrFloat(arg1, code1) - unboxCharOrFloat(arg2, code2)) + else if (maxcode <= DOUBLE) + boxToDouble( + unboxCharOrDouble(arg1, code1) - unboxCharOrDouble(arg2, code2) + ) + else + throw new NoSuchMethodException() + } + + def multiply( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + if (maxcode <= INT) + boxToInteger(unboxCharOrInt(arg1, code1) * unboxCharOrInt(arg2, code2)) + else if (maxcode <= LONG) + boxToLong(unboxCharOrLong(arg1, code1) * unboxCharOrLong(arg2, code2)) + else if (maxcode <= FLOAT) + boxToFloat(unboxCharOrFloat(arg1, code1) * unboxCharOrFloat(arg2, code2)) + else if (maxcode <= DOUBLE) + boxToDouble( + unboxCharOrDouble(arg1, code1) * unboxCharOrDouble(arg2, code2) + ) + else + throw new NoSuchMethodException() + } + + def divide( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + if (maxcode <= INT) + boxToInteger(unboxCharOrInt(arg1, 
code1) / unboxCharOrInt(arg2, code2)) + else if (maxcode <= LONG) + boxToLong(unboxCharOrLong(arg1, code1) / unboxCharOrLong(arg2, code2)) + else if (maxcode <= FLOAT) + boxToFloat(unboxCharOrFloat(arg1, code1) / unboxCharOrFloat(arg2, code2)) + else if (maxcode <= DOUBLE) + boxToDouble( + unboxCharOrDouble(arg1, code1) / unboxCharOrDouble(arg2, code2) + ) + else + throw new NoSuchMethodException() + } + + def takeModulo( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + if (maxcode <= INT) + boxToInteger(unboxCharOrInt(arg1, code1) % unboxCharOrInt(arg2, code2)) + else if (maxcode <= LONG) + boxToLong(unboxCharOrLong(arg1, code1) % unboxCharOrLong(arg2, code2)) + else if (maxcode <= FLOAT) + boxToFloat(unboxCharOrFloat(arg1, code1) % unboxCharOrFloat(arg2, code2)) + else if (maxcode <= DOUBLE) + boxToDouble( + unboxCharOrDouble(arg1, code1) % unboxCharOrDouble(arg2, code2) + ) + else + throw new NoSuchMethodException() + } + + def shiftSignedRight( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + if (code1 <= INT) { + val val1 = unboxCharOrInt(arg1, code1) + if (code2 <= INT) { + val val2 = unboxCharOrInt(arg2, code2) + return boxToInteger(val1 >> val2) + } else if (code2 <= LONG) { + val val2 = unboxCharOrLong(arg2, code2) + return boxToInteger(val1 >> val2): @nowarn + } + } + if (code1 <= LONG) { + val val1 = unboxCharOrLong(arg1, code1) + if (code2 <= INT) { + val val2 = unboxCharOrInt(arg2, code2) + return boxToLong(val1 >> val2) + } else if (code2 <= LONG) { + val val2 = unboxCharOrLong(arg2, code2) + return boxToLong(val1 >> val2) + } + } + throw new NoSuchMethodException() + } + + def shiftSignedLeft( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) 
+ if (code1 <= INT) { + val val1 = unboxCharOrInt(arg1, code1) + if (code2 <= INT) { + val val2 = unboxCharOrInt(arg2, code2) + return boxToInteger(val1 << val2) + } else if (code2 <= LONG) { + val val2 = unboxCharOrLong(arg2, code2) + return boxToInteger(val1 << val2): @nowarn + } + } + if (code1 <= LONG) { + val val1 = unboxCharOrLong(arg1, code1) + if (code2 <= INT) { + val val2 = unboxCharOrInt(arg2, code2) + return boxToLong(val1 << val2) + } else if (code2 <= LONG) { + val val2 = unboxCharOrLong(arg2, code2) + return boxToLong(val1 << val2) + } + } + throw new NoSuchMethodException() + } + + def shiftLogicalRight( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + if (code1 <= INT) { + val val1 = unboxCharOrInt(arg1, code1) + if (code2 <= INT) { + val val2 = unboxCharOrInt(arg2, code2) + return boxToInteger(val1 >>> val2) + } else if (code2 <= LONG) { + val val2 = unboxCharOrLong(arg2, code2) + return boxToInteger(val1 >>> val2): @nowarn + } + } + if (code1 <= LONG) { + val val1 = unboxCharOrLong(arg1, code1) + if (code2 <= INT) { + val val2 = unboxCharOrInt(arg2, code2) + return boxToLong(val1 >>> val2) + } else if (code2 <= LONG) { + val val2 = unboxCharOrLong(arg2, code2) + return boxToLong(val1 >>> val2) + } + } + throw new NoSuchMethodException() + } + + def negate(arg: java.lang.Object): java.lang.Object = { + val code = typeCode(arg) + if (code <= INT) { + val value = unboxCharOrInt(arg, code) + boxToInteger(-value) + } else if (code <= LONG) { + val value = unboxCharOrLong(arg, code) + boxToLong(-value) + } else if (code <= FLOAT) { + val value = unboxCharOrFloat(arg, code) + boxToFloat(-value) + } else if (code <= DOUBLE) { + val value = unboxCharOrDouble(arg, code) + boxToDouble(-value) + } else { + throw new NoSuchMethodException() + } + } + + def positive(arg: java.lang.Object): java.lang.Object = { + val code = typeCode(arg) + if (code <= INT) { + 
boxToInteger(+unboxCharOrInt(arg, code)) + } else if (code <= LONG) { + boxToLong(+unboxCharOrLong(arg, code)) + } else if (code <= FLOAT) { + boxToFloat(+unboxCharOrFloat(arg, code)) + } else if (code <= DOUBLE) { + boxToDouble(+unboxCharOrDouble(arg, code)) + } else { + throw new NoSuchMethodException() + } + } + + def takeAnd( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + if (arg1.isInstanceOf[Boolean] || arg2.isInstanceOf[Boolean]) { + if (arg1.isInstanceOf[Boolean] && arg2.isInstanceOf[Boolean]) { + boxToBoolean(arg1.asInstanceOf[Boolean] & arg2.asInstanceOf[Boolean]) + } else { + throw new NoSuchMethodException() + } + } else { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + + if (maxcode <= INT) + boxToInteger(unboxCharOrInt(arg1, code1) & unboxCharOrInt(arg2, code2)) + else if (maxcode <= LONG) + boxToLong(unboxCharOrLong(arg1, code1) & unboxCharOrLong(arg2, code2)) + else + throw new NoSuchMethodException() + } + } + + def takeOr( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + if (arg1.isInstanceOf[Boolean] || arg2.isInstanceOf[Boolean]) { + if (arg1.isInstanceOf[Boolean] && arg2.isInstanceOf[Boolean]) { + boxToBoolean(arg1.asInstanceOf[Boolean] | arg2.asInstanceOf[Boolean]) + } else { + throw new NoSuchMethodException() + } + } else { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + + if (maxcode <= INT) + boxToInteger(unboxCharOrInt(arg1, code1) | unboxCharOrInt(arg2, code2)) + else if (maxcode <= LONG) + boxToLong(unboxCharOrLong(arg1, code1) | unboxCharOrLong(arg2, code2)) + else + throw new NoSuchMethodException() + } + } + + def takeXor( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + if (arg1.isInstanceOf[Boolean] || arg2.isInstanceOf[Boolean]) { + if (arg1.isInstanceOf[Boolean] && arg2.isInstanceOf[Boolean]) { + 
boxToBoolean(arg1.asInstanceOf[Boolean] ^ arg2.asInstanceOf[Boolean]) + } else { + throw new NoSuchMethodException() + } + } else { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + + if (maxcode <= INT) + boxToInteger(unboxCharOrInt(arg1, code1) ^ unboxCharOrInt(arg2, code2)) + else if (maxcode <= LONG) + boxToLong(unboxCharOrLong(arg1, code1) ^ unboxCharOrLong(arg2, code2)) + else + throw new NoSuchMethodException() + } + } + + def takeConditionalAnd( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + if (arg1.isInstanceOf[Boolean] && arg2.isInstanceOf[Boolean]) { + boxToBoolean(arg1.asInstanceOf[Boolean] && arg2.asInstanceOf[Boolean]) + } else { + throw new NoSuchMethodException() + } + } + + def takeConditionalOr( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + if (arg1.isInstanceOf[Boolean] && arg2.isInstanceOf[Boolean]) { + boxToBoolean(arg1.asInstanceOf[Boolean] || arg2.asInstanceOf[Boolean]) + } else { + throw new NoSuchMethodException() + } + } + + def complement(arg: java.lang.Object): java.lang.Object = { + val code = typeCode(arg) + if (code <= INT) { + boxToInteger(~unboxCharOrInt(arg, code)) + } else if (code <= LONG) { + boxToLong(~unboxCharOrLong(arg, code)) + } else { + throw new NoSuchMethodException() + } + } + + def takeNot(arg: java.lang.Object): java.lang.Object = { + if (arg.isInstanceOf[Boolean]) { + boxToBoolean(!arg.asInstanceOf[Boolean]) + } else { + throw new NoSuchMethodException() + } + } + + def testEqual( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + boxToBoolean(arg1 == arg2) + } + + def testNotEqual( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + boxToBoolean(arg1 != arg2) + } + + def testLessThan( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if 
(code1 < code2) code2 else code1 + if (maxcode <= INT) { + val val1 = unboxCharOrInt(arg1, code1) + val val2 = unboxCharOrInt(arg2, code2) + boxToBoolean(val1 < val2) + } else if (maxcode <= LONG) { + val val1 = unboxCharOrLong(arg1, code1) + val val2 = unboxCharOrLong(arg2, code2) + boxToBoolean(val1 < val2) + } else if (maxcode <= FLOAT) { + val val1 = unboxCharOrFloat(arg1, code1) + val val2 = unboxCharOrFloat(arg2, code2) + boxToBoolean(val1 < val2) + } else if (maxcode <= DOUBLE) { + val val1 = unboxCharOrDouble(arg1, code1) + val val2 = unboxCharOrDouble(arg2, code2) + boxToBoolean(val1 < val2) + } else { + throw new NoSuchMethodException() + } + } + + def testLessOrEqualThan( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + if (maxcode <= INT) { + val val1 = unboxCharOrInt(arg1, code1) + val val2 = unboxCharOrInt(arg2, code2) + boxToBoolean(val1 <= val2) + } else if (maxcode <= LONG) { + val val1 = unboxCharOrLong(arg1, code1) + val val2 = unboxCharOrLong(arg2, code2) + boxToBoolean(val1 <= val2) + } else if (maxcode <= FLOAT) { + val val1 = unboxCharOrFloat(arg1, code1) + val val2 = unboxCharOrFloat(arg2, code2) + boxToBoolean(val1 <= val2) + } else if (maxcode <= DOUBLE) { + val val1 = unboxCharOrDouble(arg1, code1) + val val2 = unboxCharOrDouble(arg2, code2) + boxToBoolean(val1 <= val2) + } else { + throw new NoSuchMethodException() + } + } + + def testGreaterOrEqualThan( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + if (maxcode <= INT) { + val val1 = unboxCharOrInt(arg1, code1) + val val2 = unboxCharOrInt(arg2, code2) + boxToBoolean(val1 >= val2) + } else if (maxcode <= LONG) { + val val1 = unboxCharOrLong(arg1, code1) + val val2 = unboxCharOrLong(arg2, code2) + 
boxToBoolean(val1 >= val2) + } else if (maxcode <= FLOAT) { + val val1 = unboxCharOrFloat(arg1, code1) + val val2 = unboxCharOrFloat(arg2, code2) + boxToBoolean(val1 >= val2) + } else if (maxcode <= DOUBLE) { + val val1 = unboxCharOrDouble(arg1, code1) + val val2 = unboxCharOrDouble(arg2, code2) + boxToBoolean(val1 >= val2) + } else { + throw new NoSuchMethodException() + } + } + + def testGreaterThan( + arg1: java.lang.Object, + arg2: java.lang.Object + ): java.lang.Object = { + val code1 = typeCode(arg1) + val code2 = typeCode(arg2) + val maxcode = if (code1 < code2) code2 else code1 + if (maxcode <= INT) { + val val1 = unboxCharOrInt(arg1, code1) + val val2 = unboxCharOrInt(arg2, code2) + boxToBoolean(val1 > val2) + } else if (maxcode <= LONG) { + val val1 = unboxCharOrLong(arg1, code1) + val val2 = unboxCharOrLong(arg2, code2) + boxToBoolean(val1 > val2) + } else if (maxcode <= FLOAT) { + val val1 = unboxCharOrFloat(arg1, code1) + val val2 = unboxCharOrFloat(arg2, code2) + boxToBoolean(val1 > val2) + } else if (maxcode <= DOUBLE) { + val val1 = unboxCharOrDouble(arg1, code1) + val val2 = unboxCharOrDouble(arg2, code2) + boxToBoolean(val1 > val2) + } else { + throw new NoSuchMethodException() + } + } + + def isBoxedNumberOrBoolean(arg: java.lang.Object): Boolean = { + arg.isInstanceOf[java.lang.Boolean] || isBoxedNumber(arg) + } + + def isBoxedNumber(arg: java.lang.Object): Boolean = { + arg match { + case _: java.lang.Integer | _: java.lang.Long | _: java.lang.Double | + _: java.lang.Float | _: java.lang.Short | _: java.lang.Character | + _: java.lang.Byte => + true + case _ => false + } + } + + def toCharacter(arg: java.lang.Object): java.lang.Character = arg match { + case int: java.lang.Integer => boxToCharacter(int.toChar) + case short: java.lang.Short => boxToCharacter(short.toChar) + case char: java.lang.Character => char + case long: java.lang.Long => boxToCharacter(long.toChar) + case byte: java.lang.Byte => boxToCharacter(byte.toChar) + case float: 
java.lang.Float => boxToCharacter(float.toChar) + case double: java.lang.Double => boxToCharacter(double.toChar) + case _ => throw new NoSuchMethodException() + } + + def toByte(arg: java.lang.Object): java.lang.Byte = arg match { + case int: java.lang.Integer => boxToByte(int.toByte) + case char: java.lang.Character => boxToByte(char.toByte) + case byte: java.lang.Byte => byte + case long: java.lang.Long => boxToByte(long.toByte) + case short: java.lang.Short => boxToByte(short.toByte) + case float: java.lang.Float => boxToByte(float.toByte) + case double: java.lang.Double => boxToByte(double.toByte) + case _ => throw new NoSuchMethodException() + } + + def toShort(arg: java.lang.Object): java.lang.Short = arg match { + case int: java.lang.Integer => boxToShort(int.toShort) + case long: java.lang.Long => boxToShort(long.toShort) + case char: java.lang.Character => boxToShort(char.toShort) + case byte: java.lang.Byte => boxToShort(byte.toShort) + case short: java.lang.Short => short + case float: java.lang.Float => boxToShort(float.toShort) + case double: java.lang.Double => boxToShort(double.toShort) + case _ => throw new NoSuchMethodException() + } + + def toInteger(arg: java.lang.Object): java.lang.Integer = arg match { + case int: java.lang.Integer => int + case long: java.lang.Long => boxToInteger(long.toInt) + case double: java.lang.Double => boxToInteger(double.toInt) + case float: java.lang.Float => boxToInteger(float.toInt) + case char: java.lang.Character => boxToInteger(char.toInt) + case byte: java.lang.Byte => boxToInteger(byte.toInt) + case short: java.lang.Short => boxToInteger(short.toInt) + case _ => throw new NoSuchMethodException() + } + + def toLong(arg: java.lang.Object): java.lang.Long = arg match { + case int: java.lang.Integer => boxToLong(int.toLong) + case double: java.lang.Double => boxToLong(double.toLong) + case float: java.lang.Float => boxToLong(float.toLong) + case long: java.lang.Long => long + case char: java.lang.Character => 
boxToLong(char.toLong) + case byte: java.lang.Byte => boxToLong(byte.toLong) + case short: java.lang.Short => boxToLong(short.toLong) + case _ => throw new NoSuchMethodException() + } + + def toFloat(arg: java.lang.Object): java.lang.Float = arg match { + case int: java.lang.Integer => boxToFloat(int.toFloat) + case long: java.lang.Long => boxToFloat(long.toFloat) + case float: java.lang.Float => float + case double: java.lang.Double => boxToFloat(double.toFloat) + case char: java.lang.Character => boxToFloat(char.toFloat) + case byte: java.lang.Byte => boxToFloat(byte.toFloat) + case short: java.lang.Short => boxToFloat(short.toFloat) + case _ => throw new NoSuchMethodException() + } + + def toDouble(arg: java.lang.Object): java.lang.Double = arg match { + case int: java.lang.Integer => boxToDouble(int.toDouble) + case float: java.lang.Float => boxToDouble(float.toDouble) + case double: java.lang.Double => double + case long: java.lang.Long => boxToDouble(long.toDouble) + case char: java.lang.Character => boxToDouble(char.toDouble) + case byte: java.lang.Byte => boxToDouble(byte.toDouble) + case short: java.lang.Short => boxToDouble(short.toDouble) + case _ => throw new NoSuchMethodException() + } + // Intrinsified as primitives. They are never called. def hashFromObject(o: java.lang.Object): Int = ??? def hashFromNumber(o: java.lang.Number): Int = ??? diff --git a/auxlib/src/main/scala/scala/runtime/Statics.scala b/auxlib/src/main/scala/scala/runtime/Statics.scala index f3200128ef..43b489d85d 100644 --- a/auxlib/src/main/scala/scala/runtime/Statics.scala +++ b/auxlib/src/main/scala/scala/runtime/Statics.scala @@ -1,5 +1,7 @@ package scala.runtime +/** Not for public consumption. Usage by the runtime only. 
+ */ object Statics { @inline def mix(hash: Int, data: Int): Int = { val h1 = mixLast(hash, data) @@ -68,18 +70,23 @@ object Statics { @inline def anyHash(x: Object): Int = x match { case null => 0 + case x: java.lang.Number => anyHashNumber(x) + case _ => x.hashCode + } + + @inline private def anyHashNumber(x: java.lang.Number): Int = x match { case x: java.lang.Long => longHash(x.longValue) case x: java.lang.Double => doubleHash(x.doubleValue) case x: java.lang.Float => floatHash(x.floatValue) - case _ => x.hashCode + case _ => x.hashCode() } /** Used as a marker object to return from PartialFunctions */ - def pfMarker: AnyRef = PFMarker + @inline final def pfMarker: java.lang.Object = PFMarker - private object PFMarker extends AnyRef + private object PFMarker - def releaseFence(): Unit = () + @inline def releaseFence(): Unit = () /** Just throws an exception. * diff --git a/build.sbt b/build.sbt index f25431323d..429eb6a1d3 100644 --- a/build.sbt +++ b/build.sbt @@ -8,6 +8,7 @@ lazy val sbtScalaNative = Build.sbtScalaNative lazy val nir = Build.nir lazy val util = Build.util lazy val tools = Build.tools +lazy val toolsBenchmarks = Build.toolsBenchmarks lazy val nativelib = Build.nativelib lazy val clib = Build.clib lazy val posixlib = Build.posixlib diff --git a/docs/changelog/0.4.12.md b/docs/changelog/0.4.12.md index 879cc024e4..753368c405 100644 --- a/docs/changelog/0.4.12.md +++ b/docs/changelog/0.4.12.md @@ -1,3 +1,5 @@ +# 0.4.12 (2023-03-22) + We're happy to announce the release of Scala Native. It's the next maintenance release for Scala Native 0.4.x. This release fixes regressions introduced in the previous version and adds some requested features. diff --git a/docs/changelog/0.4.13.md b/docs/changelog/0.4.13.md new file mode 100644 index 0000000000..82aa8d5111 --- /dev/null +++ b/docs/changelog/0.4.13.md @@ -0,0 +1,114 @@ +# 0.4.13 (2023-06-05) + +We're happy to announce the release of Scala Native 0.4.13! 
+ +This release does not introduce any major improvements, but comes with variety of bugfixes. Scala Native 0.4.13 also updates the underlying Scala standard library, allowing to use its improvements in Scala 2.12, 2.13 and new definitions introduced in Scala 3.3. + + +Scala standard library used by this release is based on the following versions: + + + + + + + + + + + + + + + + + + + +
Scala binary versionScala release
2.122.12.18
2.132.13.11
33.3.0
+ + + + + + + + + + + + + + + + +
Commits since last release38
Merged PRs22
Contributors11
+ +## Contributors + +Big thanks to everybody who contributed to this release or reported an issue! + +``` +$ git shortlog -sn --no-merges v0.4.12.. + 20 Wojciech Mazur + 5 LeeTibbert + 3 Rikito Taniguchi + 2 Abdullah Sabaa Allil + 2 Arman Bilge + 1 Eric K Richardson + 1 Jarek Sacha + 1 Lorenzo Gabriele + 1 Vincent Lafeychine + 1 ankusharya + 1 kim / Motoyuki Kimura +``` + +## Merged PRs + +## [v0.4.13](https://github.com/scala-native/scala-native/tree/) (2023-06-03) + +[Full Changelog](https://github.com/scala-native/scala-native/compare/v0.4.12...v0.4.13) + +**Merged pull requests:** + +### Compiler plugin +- Fix handling empty list of var args for extern method in Scala 2.13 + [\#3240](https://github.com/scala-native/scala-native/pull/3240) + ([WojciechMazur](https://github.com/WojciechMazur)) +- Report error when extern definition is marked as inlined + [\#3241](https://github.com/scala-native/scala-native/pull/3241) + ([WojciechMazur](https://github.com/WojciechMazur)) + +### Scala Native runtime +- Fix JUnit class cast when comparing Float/Double arrays + [\#3249](https://github.com/scala-native/scala-native/pull/3249) + ([WojciechMazur](https://github.com/WojciechMazur)) +- Fix referential equality of scala.reflect.ClassTag by caching underlying Manifest instances + [\#3256](https://github.com/scala-native/scala-native/pull/3256) + ([WojciechMazur](https://github.com/WojciechMazur)) +- Improved linktime resolved definitions + [\#3266](https://github.com/scala-native/scala-native/pull/3266) + ([WojciechMazur](https://github.com/WojciechMazur)) +- Expose parsed target triple in `LinktimeInfo` + [\#3258](https://github.com/scala-native/scala-native/pull/3258) + ([armanbilge](https://github.com/armanbilge)) +- Expose heap size information to the user + [\#3275](https://github.com/scala-native/scala-native/pull/3275) + ([Abdullahsab3](https://github.com/Abdullahsab3)) +- Use `Array[Int]` instead of `Array[Array[Boolean]]` for `[class,trait]_has_trait` + 
[\#3279](https://github.com/scala-native/scala-native/pull/3279) + ([lolgab](https://github.com/lolgab)) +- Backport `uioOps` to 0.4.x + [\#3259](https://github.com/scala-native/scala-native/pull/3259) + ([armanbilge](https://github.com/armanbilge)) + +### Java Standard Library +- Fix handling of Path.relativze on Windows + [/#3299](https://github.com/scala-native/scala-native/pull/3299) + ([jpsacha](https://github.com/jpsacha)) +- Provide an evolution of Scala Native support for the Java Stream API + [\#3268](https://github.com/scala-native/scala-native/pull/3268) + ([LeeTibbert](https://github.com/LeeTibbert)) +- Character.isWhitespace(-1) returns false + [\#3284](https://github.com/scala-native/scala-native/pull/3284) + ([tanishiking](https://github.com/tanishiking)) diff --git a/docs/changelog/0.4.14.md b/docs/changelog/0.4.14.md new file mode 100644 index 0000000000..22288e2f6e --- /dev/null +++ b/docs/changelog/0.4.14.md @@ -0,0 +1,29 @@ +# 0.4.14 (2023-06-06) + +We're happy to announce the release of Scala Native 0.4.14! + +This patch version fixes backward-compatibility problems introduced in Scala Native 0.4.13. +See [changelog of version 0.4.13](0.4.13.md) for more information. + +Scala standard library used by this release is based on the following versions: + + + + + + + + + + + + + + + + + + + +
Scala binary versionScala release
2.122.12.18
2.132.13.11
33.3.0
+ diff --git a/docs/changelog/index.rst b/docs/changelog/index.rst index 7f8d59ab1a..30cc812ce9 100644 --- a/docs/changelog/index.rst +++ b/docs/changelog/index.rst @@ -6,6 +6,8 @@ Changelog .. toctree:: :maxdepth: 1 + 0.4.14 + 0.4.13 0.4.12 0.4.11 0.4.10 diff --git a/docs/conf.py b/docs/conf.py index 6a18c37867..074389c321 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -69,9 +69,9 @@ def generateScalaNativeCurrentYear(): # built documents. # # The short X.Y version. -version = u'0.4.12' +version = u'0.4.14' # The full version, including alpha/beta/rc tags. -release = u'0.4.12' +release = u'0.4.14' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/docs/contrib/build.rst b/docs/contrib/build.rst index 21d2097b68..ec97933f4b 100644 --- a/docs/contrib/build.rst +++ b/docs/contrib/build.rst @@ -140,11 +140,11 @@ If you need to test your copy of Scala Native in the larger context of a separate build, you will need to locally publish all the artifacts of Scala Native. -Use the special script that publishes all the cross versions: +Use the custom sbt command to publish all projects for a specific Scala version (`x,y,z`): .. code-block:: text - $ scripts/publish-local + > publish-local-dev x.y.z Afterwards, set the version of `sbt-scala-native` in the target project's `project/plugins.sbt` to the current SNAPSHOT version of Scala Native, and use @@ -246,12 +246,14 @@ onto source defined for the current Scala version inside its standard library. In case `overrides*` directory contains both `*.scala` file and its corresponding patch file, only `*.scala` file would be added to the compilation sources. -To operate with patches it is recommended to use Ammonite script `scripts/scalalib-patch-tool.sc`. +To operate with patches it is recommended to use ScalaCLI script `scripts/scalalib-patch-tool.sc`. It takes 2 mandatory arguments: command to use and Scala version. 
There are currently 3 supported commands defined: * recreate - creates `*.scala` files based on original sources with applied patches corresponding to their name; * create - creates `*.scala.patch` files from defined `*.scala` files in overrides directory with corresponding name; * prune - deletes all `*.scala` files which do not have corresponding `*.scala.patch` file; +(e.g. `scala-cli scripts/scalalib-patch-tool.sc -- recreate 2.13.10`) + +Each of these commands is applied to all files defined in the overrides directory. By default override directory is selected based on the used scala version, if it's not present, the script will try to use directory with corresponding Scala binary version, diff --git a/docs/contrib/contributing.rst b/docs/contrib/contributing.rst index de598a6cea..a00b2dffaf 100644 --- a/docs/contrib/contributing.rst +++ b/docs/contrib/contributing.rst @@ -226,8 +226,8 @@ followed by details of the commit, in the form of free text, or bulleted list. .. _Apache Harmony project: https://github.com/apache/harmony .. _Scala CLA: http://typesafe.com/contribute/cla/scala .. _Pull Request: https://help.github.com/articles/using-pull-requests -.. _DRY: http://programmer.97things.oreilly.com/wiki/index.php/Don%27t_Repeat_Yourself -.. _Boy Scout Rule: http://programmer.97things.oreilly.com/wiki/index.php/The_Boy_Scout_Rule +.. _DRY: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch30.html +.. _Boy Scout Rule: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch08.html .. _Git Workflow: http://sandofsky.com/blog/git-workflow.html .. _GPL and Scala License are compatible: https://www.gnu.org/licenses/license-list.html#ModifiedBSD ..
_GPL and Scala CLA are compatible: https://www.gnu.org/licenses/license-list.html#apache2 diff --git a/docs/lib/javalib.rst b/docs/lib/javalib.rst index dbb197b2f4..c78d8241af 100644 --- a/docs/lib/javalib.rst +++ b/docs/lib/javalib.rst @@ -385,6 +385,7 @@ java.util * ``Date`` * ``Deque`` * ``Dictionary`` +* ``DoubleSummaryStatistics`` * ``DuplicateFormatFlagsException`` * ``EmptyStackException`` * ``EnumSet`` @@ -409,21 +410,30 @@ java.util * ``IllformedLocaleException`` * ``InputMismatchException`` * ``InvalidPropertiesFormatException`` +* ``IntSummaryStatistics`` * ``Iterator`` * ``LinkedHashMap`` * ``LinkedHashSet`` * ``LinkedList`` * ``List`` * ``ListIterator`` +* ``LongSummaryStatistics`` * ``MissingFormatArgumentException`` * ``MissingFormatWidthException`` * ``MissingResourceException`` +* ``Map`` * ``NavigableMap`` * ``NavigableSet`` * ``NoSuchElementException`` * ``Objects`` * ``Optional`` +* ``OptionalDouble`` +* ``OptionalInt`` +* ``OptionalLong`` * ``PrimitiveIterator`` +* ``PrimitiveIterator.OfDouble`` +* ``PrimitiveIterator.OfInt`` +* ``PrimitiveIterator.OfLong`` * ``PriorityQueue`` * ``Properties`` * ``Queue`` @@ -444,77 +454,100 @@ java.util * ``UnknownFormatConversionException`` * ``UnknownFormatFlagsException`` * ``WeakHashMap`` -* ``concurrent.Callable`` -* ``concurrent.CancellationException`` -* ``concurrent.ConcurrentHashMap`` -* ``concurrent.ConcurrentHashMap.KeySetView`` -* ``concurrent.ConcurrentLinkedQueue`` -* ``concurrent.ConcurrentMap`` -* ``concurrent.ConcurrentSkipListSet`` -* ``concurrent.ExecutionException`` -* ``concurrent.Executor`` -* ``concurrent.RejectedExecutionException`` -* ``concurrent.Semaphore`` -* ``concurrent.ThreadFactory`` -* ``concurrent.ThreadLocalRandom`` -* ``concurrent.TimeUnit`` -* ``concurrent.TimeoutException`` -* ``concurrent.atomic.AtomicBoolean`` -* ``concurrent.atomic.AtomicInteger`` -* ``concurrent.atomic.AtomicLong`` -* ``concurrent.atomic.AtomicLongArray`` -* ``concurrent.atomic.AtomicReference`` -* 
``concurrent.atomic.AtomicReferenceArray`` -* ``concurrent.atomic.LongAdder`` -* ``concurrent.locks.AbstractOwnableSynchronizer`` -* ``concurrent.locks.AbstractQueuedSynchronizer`` -* ``concurrent.locks.Lock`` -* ``concurrent.locks.ReentrantLock`` -* ``function.BiConsumer`` -* ``function.BiFunction`` -* ``function.BiPredicate`` -* ``function.BinaryOperator`` -* ``function.Consumer`` -* ``function.Function`` -* ``function.IntUnaryOperator`` -* ``function.Predicate`` -* ``function.Supplier`` -* ``function.UnaryOperator`` -* ``jar.Attributes`` -* ``jar.Attributes.Name`` -* ``jar.InitManifest`` -* ``jar.JarEntry`` -* ``jar.JarFile`` -* ``jar.JarInputStream`` -* ``jar.JarOutputStream`` -* ``jar.Manifest`` -* ``regex.MatchResult`` -* ``regex.Matcher`` -* ``regex.Pattern`` -* ``regex.PatternSyntaxException`` -* ``stream.BaseStream`` -* ``stream.CompositeStream`` -* ``stream.EmptyIterator`` -* ``stream.Stream`` -* ``stream.Stream.Builder`` -* ``zip.Adler32`` -* ``zip.CRC32`` -* ``zip.CheckedInputStream`` -* ``zip.CheckedOutputStream`` -* ``zip.Checksum`` -* ``zip.DataFormatException`` -* ``zip.Deflater`` -* ``zip.DeflaterOutputStream`` -* ``zip.GZIPInputStream`` -* ``zip.GZIPOutputStream`` -* ``zip.Inflater`` -* ``zip.InflaterInputStream`` -* ``zip.ZipConstants`` -* ``zip.ZipEntry`` -* ``zip.ZipException`` -* ``zip.ZipFile`` -* ``zip.ZipInputStream`` -* ``zip.ZipOutputStream`` + +java.util.concurrent +"""""""""""""""""""" +* ``Callable`` +* ``CancellationException`` +* ``ConcurrentHashMap`` +* ``ConcurrentHashMap.KeySetView`` +* ``ConcurrentLinkedQueue`` +* ``ConcurrentMap`` +* ``ConcurrentSkipListSet`` +* ``ExecutionException`` +* ``Executor`` +* ``RejectedExecutionException`` +* ``Semaphore`` +* ``ThreadFactory`` +* ``ThreadLocalRandom`` +* ``TimeUnit`` +* ``TimeoutException`` +* ``atomic.AtomicBoolean`` +* ``atomic.AtomicInteger`` +* ``atomic.AtomicLong`` +* ``atomic.AtomicLongArray`` +* ``atomic.AtomicReference`` +* ``atomic.AtomicReferenceArray`` +* 
``atomic.LongAdder`` +* ``locks.AbstractOwnableSynchronizer`` +* ``locks.AbstractQueuedSynchronizer`` +* ``locks.Lock`` +* ``locks.ReentrantLock`` + +java.util.function +"""""""""""""""""" +* ``BiConsumer`` +* ``BiFunction`` +* ``BiPredicate`` +* ``BinaryOperator`` +* ``Consumer`` +* ``Function`` +* ``IntUnaryOperator`` +* ``Predicate`` +* ``Supplier`` +* ``UnaryOperator`` + +java.util.jar +""""""""""""" +* ``Attributes`` +* ``Attributes.Name`` +* ``InitManifest`` +* ``JarEntry`` +* ``JarFile`` +* ``JarInputStream`` +* ``JarOutputStream`` +* ``Manifest`` + +java.util.regex +""""""""""""""" +* ``MatchResult`` +* ``Matcher`` +* ``Pattern`` +* ``PatternSyntaxException`` + +java.util.stream +"""""""""""""""" +* ``BaseStream`` +* ``Collector`` +* ``Collector.Characteristics`` +* ``Collectors`` +* ``DoubleStream`` +* ``DoubleStream.Builder`` +* ``DoubleStream.DoubleMapMultiConsumer`` +* ``Stream`` +* ``Stream.Builder`` +* ``StreamSupport`` + +java.util.zip +""""""""""""" +* ``Adler32`` +* ``CRC32`` +* ``CheckedInputStream`` +* ``CheckedOutputStream`` +* ``Checksum`` +* ``DataFormatException`` +* ``Deflater`` +* ``DeflaterOutputStream`` +* ``GZIPInputStream`` +* ``GZIPOutputStream`` +* ``Inflater`` +* ``InflaterInputStream`` +* ``ZipConstants`` +* ``ZipEntry`` +* ``ZipException`` +* ``ZipFile`` +* ``ZipInputStream`` +* ``ZipOutputStream`` **Note:** This is an ongoing effort, some of the classes listed here might diff --git a/docs/user/testing.rst b/docs/user/testing.rst index e85c122fc4..52381750fc 100644 --- a/docs/user/testing.rst +++ b/docs/user/testing.rst @@ -21,8 +21,7 @@ To enable JUnit support, add the following lines to your `build.sbt` file: .. 
parsed-literal:: - libraryDependencies += "org.scala-native" %%% "junit-runtime" % |release| - addCompilerPlugin("org.scala-native" % "junit-plugin" % |release| cross CrossVersion.full) + enablePlugins(ScalaNativeJUnitPlugin) If you want to get more detailed output from the JUnit runtime, also include the following line: diff --git a/javalib/src/main/scala-2.12/scala/scalanative/compat/ScalaStream.scala b/javalib/src/main/scala-2.12/scala/scalanative/compat/ScalaStream.scala deleted file mode 100644 index 054d96476e..0000000000 --- a/javalib/src/main/scala-2.12/scala/scalanative/compat/ScalaStream.scala +++ /dev/null @@ -1,20 +0,0 @@ -package scala.scalanative.compat - -import java.util.stream.WrappedScalaStream -import scala.collection.immutable -import scala.language.implicitConversions - -private[scalanative] object ScalaStream { - type Underlying[T] = immutable.Stream[T] - val Underlying = immutable.Stream - - implicit class ScalaStreamImpl[T](val underyling: Underlying[T]) - extends AnyVal { - def wrappedStream(closeHanlder: Option[Runnable] = None) = - new WrappedScalaStream[T](underyling, closeHanlder) - } - - implicit def seqToScalaStream[T](seq: Iterable[T]): Underlying[T] = { - seq.to[Underlying] - } -} diff --git a/javalib/src/main/scala-2.13/scala/scalanative/compat/ScalaStream.scala b/javalib/src/main/scala-2.13/scala/scalanative/compat/ScalaStream.scala deleted file mode 100644 index 2915dd9c66..0000000000 --- a/javalib/src/main/scala-2.13/scala/scalanative/compat/ScalaStream.scala +++ /dev/null @@ -1,21 +0,0 @@ -package scala.scalanative.compat - -import java.util.stream.WrappedScalaStream -import scala.collection.immutable -import scala.language.implicitConversions - -private[scalanative] object ScalaStream { - type Underlying[T] = immutable.LazyList[T] - val Underlying = immutable.LazyList - - implicit class ScalaStreamImpl[T](val underyling: Underlying[T]) - extends AnyVal { - def wrappedStream(closeHanlder: Option[Runnable] = None) = - new 
WrappedScalaStream[T](underyling, closeHanlder) - } - - implicit def seqToScalaStream[T](seq: Iterable[T]): Underlying[T] = { - seq.to(Underlying) - } - -} diff --git a/javalib/src/main/scala-2/java/lang/Enum.scala b/javalib/src/main/scala-2/java/lang/Enum.scala deleted file mode 100644 index 842ea666c8..0000000000 --- a/javalib/src/main/scala-2/java/lang/Enum.scala +++ /dev/null @@ -1,13 +0,0 @@ -// Classes in this file need special handling in Scala 3, we need to make sure -// that they would not be compiled with Scala 3 compiler - -package java.lang - -abstract class Enum[E <: Enum[E]] protected (_name: String, _ordinal: Int) - extends Comparable[E] - with java.io.Serializable { - def name(): String = _name - def ordinal(): Int = _ordinal - override def toString(): String = _name - final def compareTo(o: E): Int = _ordinal.compareTo(o.ordinal()) -} diff --git a/javalib/src/main/scala-3/scala/scalanative/compat/ScalaStream.scala b/javalib/src/main/scala-3/scala/scalanative/compat/ScalaStream.scala deleted file mode 100644 index c2ae072f2f..0000000000 --- a/javalib/src/main/scala-3/scala/scalanative/compat/ScalaStream.scala +++ /dev/null @@ -1,24 +0,0 @@ -// This file defines common wrapper for Scala streams -// to allow for cross-compilation between Scala 2.12- and Scala 2.13+ -// due to changes to collections API used in the javalib. 
-package scala.scalanative.compat - -import java.util.stream.WrappedScalaStream -import scala.collection.immutable -import scala.language.implicitConversions - -private[scalanative] object ScalaStream { - type Underlying[T] = immutable.LazyList[T] - val Underlying = immutable.LazyList - - implicit class ScalaStreamImpl[T](val underyling: Underlying[T]) - extends AnyVal { - def wrappedStream(closeHanlder: Option[Runnable] = None) = - new WrappedScalaStream[T](underyling, closeHanlder) - } - - implicit def seqToScalaStream[T](seq: Iterable[T]): Underlying[T] = { - seq.to(Underlying) - } - -} diff --git a/javalib/src/main/scala/java/io/BufferedReader.scala b/javalib/src/main/scala/java/io/BufferedReader.scala index 93026c90f1..7ad3ed48a2 100644 --- a/javalib/src/main/scala/java/io/BufferedReader.scala +++ b/javalib/src/main/scala/java/io/BufferedReader.scala @@ -1,7 +1,8 @@ package java.io -import scala.scalanative.compat.StreamsCompat._ -import java.util.stream.{Stream, WrappedScalaStream} +import java.util.Spliterators +import java.util.function.Consumer +import java.util.stream.{Stream, StreamSupport} class BufferedReader(in: Reader, sz: Int) extends Reader { @@ -127,8 +128,25 @@ class BufferedReader(in: Reader, sz: Int) extends Reader { def lines(): Stream[String] = lines(false) - private[java] def lines(closeAtEnd: Boolean): Stream[String] = - new WrappedScalaStream(toScalaStream(closeAtEnd), None) + private[java] def lines(closeAtEnd: Boolean): Stream[String] = { + val spliter = + new Spliterators.AbstractSpliterator[String](Long.MaxValue, 0) { + def tryAdvance(action: Consumer[_ >: String]): Boolean = { + readLine() match { + case null => + if (closeAtEnd) + close() + false + + case line => + action.accept(line) + true + } + } // tryAdvance + } + + StreamSupport.stream(spliter, parallel = false) + } /** Prepare the buffer for reading. 
Returns false if EOF */ private def prepareRead(): Boolean = @@ -161,16 +179,4 @@ class BufferedReader(in: Reader, sz: Int) extends Reader { pos += 1 } } - - private[this] def toScalaStream(closeAtEnd: Boolean): SStream[String] = { - Option(readLine()) match { - case None => - if (closeAtEnd) { - close() - } - SStream.empty - case Some(line) => line #:: toScalaStream(closeAtEnd) - } - } - } diff --git a/javalib/src/main/scala/java/lang/Character.scala b/javalib/src/main/scala/java/lang/Character.scala index 8bb0780891..7c8d1cc179 100644 --- a/javalib/src/main/scala/java/lang/Character.scala +++ b/javalib/src/main/scala/java/lang/Character.scala @@ -544,7 +544,8 @@ object Character { def isWhitespace(codePoint: scala.Int): scala.Boolean = { def isSeparator(tpe: Int): scala.Boolean = tpe == SPACE_SEPARATOR || tpe == LINE_SEPARATOR || tpe == PARAGRAPH_SEPARATOR - if (codePoint < 256) { + if (codePoint < 0) false + else if (codePoint < 256) { codePoint == '\t' || codePoint == '\n' || codePoint == '\u000B' || codePoint == '\f' || codePoint == '\r' || ('\u001C' <= codePoint && codePoint <= '\u001F') || @@ -592,7 +593,8 @@ object Character { isLowerCase(c.toInt) def isLowerCase(c: Int): scala.Boolean = { - if (c < 256) + if (c < 0) false + else if (c < 256) c == '\u00AA' || c == '\u00BA' || getTypeLT256(c) == LOWERCASE_LETTER else isLowerCaseGE256(c) diff --git a/javalib/src/main/scala-3/java/lang/Enum.scala b/javalib/src/main/scala/java/lang/Enum.scala similarity index 100% rename from javalib/src/main/scala-3/java/lang/Enum.scala rename to javalib/src/main/scala/java/lang/Enum.scala diff --git a/javalib/src/main/scala/java/nio/file/Files.scala b/javalib/src/main/scala/java/nio/file/Files.scala index 0cd8101c4b..fd3567bde8 100644 --- a/javalib/src/main/scala/java/nio/file/Files.scala +++ b/javalib/src/main/scala/java/nio/file/Files.scala @@ -1,49 +1,34 @@ package java.nio.file +import java.io._ import java.lang.Iterable -import java.io.{ - BufferedReader, - 
BufferedWriter, - File, - FileOutputStream, - IOException, - InputStream, - InputStreamReader, - OutputStream, - OutputStreamWriter, - StringWriter, - UncheckedIOException -} -import java.nio.file.attribute._ import java.nio.charset.{Charset, StandardCharsets} -import java.nio.channels.{FileChannel, SeekableByteChannel} +import java.nio.channels.SeekableByteChannel +import java.nio.file.attribute._ +import java.nio.file.StandardCopyOption.{COPY_ATTRIBUTES, REPLACE_EXISTING} + +import java.util._ import java.util.function.BiPredicate -import java.util.{ - EnumSet, - HashMap, - HashSet, - Iterator, - LinkedList, - List, - Map, - Set -} -import java.util.stream.{Stream, WrappedScalaStream} +import java.util.stream.Stream import scala.annotation.tailrec import scalanative.unsigned._ import scalanative.unsafe._ import scalanative.libc._ + +import java.nio.channels.SeekableByteChannel import scalanative.libc.errno.errno -import scalanative.posix.{dirent, fcntl, limits, unistd} -import dirent._ import scalanative.posix.errno.{EEXIST, ENOENT, ENOTEMPTY} +import scalanative.posix.{fcntl, limits, unistd} +import scalanative.posix.sys.stat -import java.nio.file.StandardCopyOption.{COPY_ATTRIBUTES, REPLACE_EXISTING} +import scalanative.meta.LinktimeInfo.isWindows + +import scalanative.nio.fs.FileHelpers import scalanative.nio.fs.unix.UnixException -import scalanative.posix.sys.stat + import scalanative.windows._ import scalanative.windows.WinBaseApi._ import scalanative.windows.WinBaseApiExt._ @@ -51,16 +36,8 @@ import scalanative.windows.FileApiExt._ import scalanative.windows.ErrorHandlingApi._ import scalanative.windows.winnt.AccessRights._ import java.util.WindowsHelperMethods._ -import scalanative.nio.fs.FileHelpers -import scalanative.compat.StreamsCompat._ -import scalanative.meta.LinktimeInfo.isWindows -import scala.collection.immutable.{Map => SMap, Set => SSet} -import java.io.FileNotFoundException object Files { - - private final val `1U` = 1.toUInt - private final 
val emptyPath = Paths.get("", Array.empty) // def getFileStore(path: Path): FileStore @@ -405,22 +382,24 @@ object Files { options: Array[FileVisitOption] ): Stream[Path] = { val nofollow = Array(LinkOption.NOFOLLOW_LINKS) - val stream = walk(start, maxDepth, 0, options, SSet.empty).filter { p => - val brokenSymLink = - if (isSymbolicLink(p)) { - val target = readSymbolicLink(p) - val targetExists = exists(target, nofollow) - !targetExists - } else false - val linkOpts = - if (!brokenSymLink) linkOptsFromFileVisitOpts(options) else nofollow - val attributes = - getFileAttributeView(p, classOf[BasicFileAttributeView], linkOpts) - .readAttributes() - - matcher.test(p, attributes) - } - new WrappedScalaStream(stream, None) + val stream = + walk(start, maxDepth, 0, options, new HashSet[Path]()).filter { p => + val brokenSymLink = + if (isSymbolicLink(p)) { + val target = readSymbolicLink(p) + val targetExists = exists(target, nofollow) + !targetExists + } else false + val linkOpts = + if (!brokenSymLink) linkOptsFromFileVisitOpts(options) else nofollow + val attributes = + getFileAttributeView(p, classOf[BasicFileAttributeView], linkOpts) + .readAttributes() + + matcher.test(p, attributes) + } + + stream } def getAttribute( @@ -436,9 +415,11 @@ object Files { attribute.substring(0, sepIndex), attribute.substring(sepIndex + 1, attribute.length) ) - val viewClass = viewNamesToClasses - .get(viewName) - .getOrElse(throw new UnsupportedOperationException()) + val viewClass = { + if (!viewNamesToClasses.containsKey(viewName)) + throw new UnsupportedOperationException() + viewNamesToClasses.get(viewName) + } val view = getFileAttributeView(path, viewClass, options) view.getAttribute(attrName) } @@ -548,10 +529,7 @@ object Files { if (dir.equals(emptyPath)) "./" else dir.toString() - new WrappedScalaStream( - FileHelpers.list(dirString, (n, _) => dir.resolve(n)).toScalaStream, - None - ) + Arrays.stream[Path](FileHelpers.list(dirString, (n, _) => dir.resolve(n))) } def 
move(source: Path, target: Path, options: Array[CopyOption]): Path = { @@ -765,9 +743,11 @@ object Files { tpe: Class[A], options: Array[LinkOption] ): A = { - val viewClass = attributesClassesToViews - .get(tpe) - .getOrElse(throw new UnsupportedOperationException()) + val viewClass = { + if (!attributesClassesToViews.containsKey(tpe)) + throw new UnsupportedOperationException() + attributesClassesToViews.get(tpe) + } val view = getFileAttributeView(path, viewClass, options) view.readAttributes().asInstanceOf[A] } @@ -783,9 +763,11 @@ object Files { else (parts(0), parts(1)) if (atts == "*") { - val viewClass = viewNamesToClasses - .get(viewName) - .getOrElse(throw new UnsupportedOperationException()) + val viewClass = { + if (!viewNamesToClasses.containsKey(viewName)) + throw new UnsupportedOperationException() + viewNamesToClasses.get(viewName) + } getFileAttributeView(path, viewClass, options).asMap } else { val attrs = atts.split(",") @@ -848,7 +830,7 @@ object Files { if (unistd.readlink( toCString(link.toString), buf, - limits.PATH_MAX - `1U` + limits.PATH_MAX - 1.toUInt ) == -1) { throw UnixException(link.toString, errno) } @@ -871,9 +853,11 @@ object Files { attribute.substring(0, sepIndex), attribute.substring(sepIndex + 1, attribute.length) ) - val viewClass = viewNamesToClasses - .get(viewName) - .getOrElse(throw new UnsupportedOperationException()) + val viewClass = { + if (!viewNamesToClasses.containsKey(viewName)) + throw new UnsupportedOperationException() + viewNamesToClasses.get(viewName) + } val view = getFileAttributeView(path, viewClass, options) view.setAttribute(attrName, value) path @@ -913,52 +897,73 @@ object Files { start: Path, maxDepth: Int, options: Array[FileVisitOption] - ): Stream[Path] = - new WrappedScalaStream(walk(start, maxDepth, 0, options, Set(start)), None) + ): Stream[Path] = { + val visited = new HashSet[Path]() + visited.add(start) + walk(start, maxDepth, 0, options, visited) + } private def walk( start: Path, maxDepth: 
Int, currentDepth: Int, options: Array[FileVisitOption], - visited: SSet[Path] - ): SStream[Path] = { - start #:: { - if (!isDirectory(start, linkOptsFromFileVisitOpts(options))) SStream.empty - else { - FileHelpers - .list(start.toString, (n, t) => (n, t)) - .toScalaStream - .flatMap { + visited: Set[Path] // Java Set, gets mutated. Private so no footgun. + ): Stream[Path] = { + /* Design Note: + * This implementation is an update to Java streams of this historical + * Scala stream implementation. It is somewhat inefficient/costly + * in that it converts known single names to a singleton Stream + * and then relies upon flatmap() to merge streams. Creating a + * full blown Stream has some overhead. A less costly implementation + * would be a good use of time. + * + * Some of the historical design is due to the JVM requirements on + * Stream#flatMap. Java 16 introduced Stream#mapMulti which + * relaxes the requirement to create small intermediate streams. + * When Scala Native requires a minimum JDK >= 16, that method + * would fix the problem described. So watchful waiting is + * probably the most economic approach, once the problem is described. 
+ */ + + if (!isDirectory(start, linkOptsFromFileVisitOpts(options))) + Stream.of(start) + else { + Stream.concat( + Stream.of(start), + Arrays + .asList(FileHelpers.list(start.toString, (n, t) => (n, t))) + .stream() + .flatMap[Path] { case (name, FileHelpers.FileType.Link) if options.contains(FileVisitOption.FOLLOW_LINKS) => val path = start.resolve(name) - val newVisited = visited + path + val target = readSymbolicLink(path) - if (newVisited.contains(target)) + + visited.add(path) + + if (visited.contains(target)) throw new UncheckedIOException( new FileSystemLoopException(path.toString) ) else if (!exists(target, Array(LinkOption.NOFOLLOW_LINKS))) - start.resolve(name) #:: SStream.empty + Stream.of(start.resolve(name)) else - walk(path, maxDepth, currentDepth + 1, options, newVisited) + walk(path, maxDepth, currentDepth + 1, options, visited) case (name, FileHelpers.FileType.Directory) if currentDepth < maxDepth => val path = start.resolve(name) - val newVisited = - if (options.contains(FileVisitOption.FOLLOW_LINKS)) - visited + path - else visited - walk(path, maxDepth, currentDepth + 1, options, newVisited) + if (options.contains(FileVisitOption.FOLLOW_LINKS)) + visited.add(path) + walk(path, maxDepth, currentDepth + 1, options, visited) case (name, _) => - start.resolve(name) #:: SStream.empty + Stream.of(start.resolve(name)) } - } + ) } - } def walkFileTree(start: Path, visitor: FileVisitor[_ >: Path]): Path = @@ -999,10 +1004,12 @@ object Files { ): Path = { val nofollow = Array(LinkOption.NOFOLLOW_LINKS) val optsArray = options.toArray(new Array[FileVisitOption](options.size())) - val stream = walk(start, maxDepth, 0, optsArray, SSet.empty) - val dirsToSkip = scala.collection.mutable.Set.empty[Path] + val dirsToSkip = new HashSet[Path] val openDirs = scala.collection.mutable.Stack.empty[Path] - stream.foreach { p => + + val stream = walk(start, maxDepth, 0, optsArray, new HashSet[Path]) + + stream.forEach { p => val parent = p.getParent() if 
(dirsToSkip.contains(parent)) () @@ -1046,8 +1053,8 @@ object Files { result match { case FileVisitResult.TERMINATE => throw TerminateTraversalException - case FileVisitResult.SKIP_SUBTREE => dirsToSkip += p - case FileVisitResult.SKIP_SIBLINGS => dirsToSkip += parent + case FileVisitResult.SKIP_SUBTREE => dirsToSkip.add(p) + case FileVisitResult.SKIP_SIBLINGS => dirsToSkip.add(parent) case FileVisitResult.CONTINUE => () } @@ -1121,24 +1128,32 @@ object Files { setAttribute(path, name, value.asInstanceOf[AnyRef], Array.empty) } - private val attributesClassesToViews: SMap[Class[ + private val attributesClassesToViews: Map[Class[ _ <: BasicFileAttributes - ], Class[_ <: BasicFileAttributeView]] = - SMap( - classOf[BasicFileAttributes] -> classOf[BasicFileAttributeView], - classOf[DosFileAttributes] -> classOf[DosFileAttributeView], - classOf[PosixFileAttributes] -> classOf[PosixFileAttributeView] - ) + ], Class[_ <: BasicFileAttributeView]] = { + type HMK = Class[_ <: BasicFileAttributes] + type HMV = Class[_ <: BasicFileAttributeView] - private val viewNamesToClasses: SMap[String, Class[_ <: FileAttributeView]] = - SMap( - "acl" -> classOf[AclFileAttributeView], - "basic" -> classOf[BasicFileAttributeView], - "dos" -> classOf[DosFileAttributeView], - "owner" -> classOf[FileOwnerAttributeView], - "user" -> classOf[UserDefinedFileAttributeView], - "posix" -> classOf[PosixFileAttributeView] - ) + val map = new HashMap[HMK, HMV]() + map.put(classOf[BasicFileAttributes], classOf[BasicFileAttributeView]) + map.put(classOf[DosFileAttributes], classOf[DosFileAttributeView]) + map.put(classOf[PosixFileAttributes], classOf[PosixFileAttributeView]) + + map + } + + private val viewNamesToClasses: Map[String, Class[_ <: FileAttributeView]] = { + val map = new HashMap[String, Class[_ <: FileAttributeView]]() + + map.put("acl", classOf[AclFileAttributeView]) + map.put("basic", classOf[BasicFileAttributeView]) + map.put("dos", classOf[DosFileAttributeView]) + map.put("owner", 
classOf[FileOwnerAttributeView]) + map.put("user", classOf[UserDefinedFileAttributeView]) + map.put("posix", classOf[PosixFileAttributeView]) + + map + } // Since: Java 11 def writeString( diff --git a/javalib/src/main/scala/java/util/ArrayList.scala b/javalib/src/main/scala/java/util/ArrayList.scala index eebecd57b4..6f920c6d16 100644 --- a/javalib/src/main/scala/java/util/ArrayList.scala +++ b/javalib/src/main/scala/java/util/ArrayList.scala @@ -1,6 +1,7 @@ package java.util import java.io.Serializable +import java.util.function.Consumer // Added extra private constructors to handle all of the overloads. // To preserve method signatures, we cannot take ClassTag via implicit parameters. @@ -178,4 +179,39 @@ class ArrayList[E] private ( } _size = 0 } + + override def spliterator(): Spliterator[E] = { + /* Provide a more efficient spliterator. + * + * 'inner' has type Array[Any]. There is no Arrays.spliterator() method + * for element type Any. Closest is AnyRef but that is not close enough. + * + * Default spliterator from Collection.scala is provided by + * Spliterators.spliterator(collection) method. That uses the + * collection-in-question's iterator: here ArrayList + * + * ArrayList uses an iterator() implementation inherited from + * AbstractList.scala. That, eventually, returns a heavyweight + * RandomAccessListIterator. Given all that, custom spliterator has + * a good chance of having better performance, especially for large + * collections. 
+ */ + + new Spliterators.AbstractSpliterator[E]( + _size, + Spliterator.SIZED | Spliterator.SUBSIZED + ) { + private var cursor = 0 + private val limit = _size + + def tryAdvance(action: Consumer[_ >: E]): Boolean = { + if (cursor >= limit) false + else { + action.accept(inner(cursor).asInstanceOf[E]) + cursor += 1 + true + } + } + } + } } diff --git a/javalib/src/main/scala/java/util/Arrays.scala b/javalib/src/main/scala/java/util/Arrays.scala index 2ed8c6f6c9..d878b3495f 100644 --- a/javalib/src/main/scala/java/util/Arrays.scala +++ b/javalib/src/main/scala/java/util/Arrays.scala @@ -1,5 +1,6 @@ // Ported from Scala.js commit: ba618ed dated: 2020-10-05 // Arrays.spliterator() methods added for Scala Native. +// Arrays.stream() methods added for Scala Native. package java.util @@ -7,7 +8,8 @@ import scala.annotation.tailrec import scala.reflect.ClassTag -import ScalaOps._ +import java.{util => ju} +import java.util.stream.StreamSupport object Arrays { @inline @@ -1080,7 +1082,7 @@ object Arrays { ) } - def spliterator[T](array: Array[Object]): Spliterator[T] = { + def spliterator[T](array: Array[AnyRef]): Spliterator[T] = { Objects.requireNonNull(array) Spliterators.spliterator( array, @@ -1091,7 +1093,7 @@ object Arrays { } def spliterator[T]( - array: Array[Object], + array: Array[AnyRef], startInclusive: Int, endExclusive: Int ): Spliterator[T] = { @@ -1103,4 +1105,81 @@ object Arrays { standardArraySpliteratorCharacteristics ) } + + def stream(array: Array[Double]): ju.stream.DoubleStream = { + Objects.requireNonNull(array) + + val spliter = Arrays.spliterator(array) + StreamSupport.doubleStream(spliter, parallel = false) + } + + def stream( + array: Array[Double], + startInclusive: Int, + endExclusive: Int + ): ju.stream.DoubleStream = { + Objects.requireNonNull(array) + + val spliter = Arrays.spliterator(array, startInclusive, endExclusive) + StreamSupport.doubleStream(spliter, parallel = false) + } + + def stream(array: Array[Int]): ju.stream.IntStream = { 
+ Objects.requireNonNull(array) + + val spliter = Arrays.spliterator(array) + StreamSupport.intStream(spliter, parallel = false) + } + + def stream( + array: Array[Int], + startInclusive: Int, + endExclusive: Int + ): ju.stream.IntStream = { + Objects.requireNonNull(array) + + val spliter = Arrays.spliterator(array, startInclusive, endExclusive) + StreamSupport.intStream(spliter, parallel = false) + } + + def stream(array: Array[Long]): ju.stream.LongStream = { + Objects.requireNonNull(array) + + val spliter = Arrays.spliterator(array) + StreamSupport.longStream(spliter, parallel = false) + } + + def stream( + array: Array[Long], + startInclusive: Int, + endExclusive: Int + ): ju.stream.LongStream = { + Objects.requireNonNull(array) + + val spliter = Arrays.spliterator(array, startInclusive, endExclusive) + StreamSupport.longStream(spliter, parallel = false) + } + + def stream[T <: AnyRef](array: Array[T]): ju.stream.Stream[T] = { + Objects.requireNonNull(array) + + val spliter = Arrays.spliterator[T](array.asInstanceOf[Array[AnyRef]]) + StreamSupport.stream(spliter, parallel = false) + } + + def stream[T <: AnyRef]( + array: Array[T], + startInclusive: Int, + endExclusive: Int + ): ju.stream.Stream[T] = { + Objects.requireNonNull(array) + + val spliter = Arrays.spliterator[T]( + array.asInstanceOf[Array[AnyRef]], + startInclusive, + endExclusive + ) + + StreamSupport.stream(spliter, parallel = false) + } } diff --git a/javalib/src/main/scala/java/util/Collection.scala b/javalib/src/main/scala/java/util/Collection.scala index 25d9748698..1052379c02 100644 --- a/javalib/src/main/scala/java/util/Collection.scala +++ b/javalib/src/main/scala/java/util/Collection.scala @@ -1,9 +1,11 @@ // Ported from Scala.js commit: f122aa5 dated: 2019-07-03 // Additional Spliterator code implemented for Scala Native +// Additional Stream code implemented for Scala Native package java.util import java.util.function.Consumer import java.util.function.Predicate +import 
java.util.stream.{Stream, StreamSupport} trait Collection[E] extends java.lang.Iterable[E] { def size(): Int @@ -42,4 +44,16 @@ trait Collection[E] extends java.lang.Iterable[E] { override def spliterator(): Spliterator[E] = { Spliterators.spliterator[E](this, Spliterator.SIZED | Spliterator.SUBSIZED) } + + /* From the Java documentation: + * "The default implementation should be overridden by subclasses that + * "This method should be overridden when the spliterator() method cannot + * return a spliterator that is IMMUTABLE, CONCURRENT, or late-binding. + * (See spliterator() for details.)"" + */ + def stream(): Stream[E] = + StreamSupport.stream(this.spliterator(), parallel = false) + + def parallelStream(): Stream[E] = + StreamSupport.stream(this.spliterator(), parallel = true) } diff --git a/javalib/src/main/scala/java/util/DoubleSummaryStatistics.scala b/javalib/src/main/scala/java/util/DoubleSummaryStatistics.scala new file mode 100644 index 0000000000..8be6116be7 --- /dev/null +++ b/javalib/src/main/scala/java/util/DoubleSummaryStatistics.scala @@ -0,0 +1,63 @@ +package java.util + +import java.{lang => jl} + +class DoubleSummaryStatistics() { + private var count: Long = 0L + private var min: Double = jl.Double.POSITIVE_INFINITY + private var max: Double = jl.Double.NEGATIVE_INFINITY + private var sum: Double = 0.0 + + def this(count: Long, min: Double, max: Double, sum: Double) = { + this() + this.count = count + this.min = min + this.max = max + this.sum = sum + } + + def accept(value: Double): Unit = { + count += 1L + sum += value + + if (value < min) + min = value + + if (value > max) + max = value + } + + def combine(other: DoubleSummaryStatistics): Unit = { + count += other.count + sum += other.sum + + if (other.min < min) + min = other.min + + if (other.max > max) + max = other.max + } + + final def getAverage(): Double = + if (count == 0) 0.0 // as defined by JVM DoubleSummaryStatistics + else sum / count + + final def getCount(): Long = count + + 
final def getMax(): Double = max + + final def getMin(): Double = min + + final def getSum(): Double = sum + + override def toString(): String = { + "DoubleSummaryStatistics{" + + s"count=${count}, " + + s"sum=${sum}, " + + s"min=${min}, " + + s"average=${getAverage()}, " + + s"max=${max}" + + "}" + } + +} diff --git a/javalib/src/main/scala/java/util/IntSummaryStatistics.scala b/javalib/src/main/scala/java/util/IntSummaryStatistics.scala new file mode 100644 index 0000000000..05e239d4c2 --- /dev/null +++ b/javalib/src/main/scala/java/util/IntSummaryStatistics.scala @@ -0,0 +1,63 @@ +package java.util + +import java.{lang => jl} + +class IntSummaryStatistics() { + private var count: Long = 0L + private var min: Int = jl.Integer.MAX_VALUE + private var max: Int = jl.Integer.MIN_VALUE + private var sum: Long = 0L + + def this(count: Long, min: Int, max: Int, sum: Long) = { + this() + this.count = count + this.min = min + this.max = max + this.sum = sum + } + + def accept(value: Int): Unit = { + count += 1L + sum += value + + if (value < min) + min = value + + if (value > max) + max = value + } + + def combine(other: IntSummaryStatistics): Unit = { + count += other.count + sum += other.sum + + if (other.min < min) + min = other.min + + if (other.max > max) + max = other.max + } + + final def getAverage(): Double = + if (count == 0) 0.0 // as defined by JVM IntSummaryStatistics + else sum.toDouble / count.toDouble + + final def getCount(): Long = count + + final def getMax(): Int = max + + final def getMin(): Int = min + + final def getSum(): Long = sum + + override def toString(): String = { + "IntSummaryStatistics{" + + s"count=${count}, " + + s"sum=${sum}, " + + s"min=${min}, " + + s"average=${getAverage()}, " + + s"max=${max}" + + "}" + } + +} diff --git a/javalib/src/main/scala/java/util/LongSummaryStatistics.scala b/javalib/src/main/scala/java/util/LongSummaryStatistics.scala new file mode 100644 index 0000000000..563424231e --- /dev/null +++ 
b/javalib/src/main/scala/java/util/LongSummaryStatistics.scala @@ -0,0 +1,66 @@ +package java.util + +import java.{lang => jl} + +class LongSummaryStatistics() { + private var count: Long = 0L + private var min: Long = jl.Long.MAX_VALUE + private var max: Long = jl.Long.MIN_VALUE + private var sum: Long = 0L + + def this(count: Long, min: Long, max: Long, sum: Long) = { + this() + this.count = count + this.min = min + this.max = max + this.sum = sum + } + + def accept(value: Int): Unit = + accept(value.toLong) + + def accept(value: Long): Unit = { + count += 1L + sum += value + + if (value < min) + min = value + + if (value > max) + max = value + } + + def combine(other: LongSummaryStatistics): Unit = { + count += other.count + sum += other.sum + + if (other.min < min) + min = other.min + + if (other.max > max) + max = other.max + } + + final def getAverage(): Double = + if (count == 0) 0.0 // as defined by JVM LongSummaryStatistics + else sum.toDouble / count.toDouble + + final def getCount(): Long = count + + final def getMax(): Long = max + + final def getMin(): Long = min + + final def getSum(): Long = sum + + override def toString(): String = { + "LongSummaryStatistics{" + + s"count=${count}, " + + s"sum=${sum}, " + + s"min=${min}, " + + s"average=${getAverage()}, " + + s"max=${max}" + + "}" + } + +} diff --git a/javalib/src/main/scala/java/util/Optional.scala b/javalib/src/main/scala/java/util/Optional.scala index 58fbe6cbb2..05d3072dd0 100644 --- a/javalib/src/main/scala/java/util/Optional.scala +++ b/javalib/src/main/scala/java/util/Optional.scala @@ -1,8 +1,10 @@ package java.util // Ported from Scala.js commit SHA1: 9c79cb9 dated: 2022-03-18 +// stream() method added for Scala Native import java.util.function._ +import java.util.{stream => jus} final class Optional[T] private (value: T) { import Optional._ @@ -62,6 +64,11 @@ final class Optional[T] private (value: T) { if (isPresent()) value else throw exceptionSupplier.get() + // Since: Java 9 + def 
stream(): jus.Stream[T] = + if (isPresent()) jus.Stream.of(value.asInstanceOf[Object]) + else jus.Stream.empty[T]() + override def equals(obj: Any): Boolean = { obj match { case opt: Optional[_] => diff --git a/javalib/src/main/scala/java/util/OptionalDouble.scala b/javalib/src/main/scala/java/util/OptionalDouble.scala new file mode 100644 index 0000000000..a196f9c3e6 --- /dev/null +++ b/javalib/src/main/scala/java/util/OptionalDouble.scala @@ -0,0 +1,85 @@ +package java.util + +// New work for Scala Native. Based on Scala Native Optional.scala: +// Ported from Scala.js commit SHA1: 9c79cb9 dated: 2022-03-18 + +import java.util.function._ +import java.util.{stream => jus} + +final class OptionalDouble private (hasValue: Boolean, value: Double) { + + def getAsDouble(): Double = { + if (!isPresent()) + throw new NoSuchElementException() + else + value + } + + @inline def isPresent(): Boolean = hasValue + + // Since: Java 11 + @inline def isEmpty(): Boolean = !hasValue + + def ifPresent(action: DoubleConsumer): Unit = { + if (isPresent()) + action.accept(value) + } + + // Since: Java 9 + def ifPresentOrElse(action: DoubleConsumer, emptyAction: Runnable): Unit = { + if (isPresent()) + action.accept(value) + else + emptyAction.run() + } + + def orElse(other: Double): Double = + if (isPresent()) value + else other + + def orElseGet(supplier: DoubleSupplier): Double = + if (isPresent()) value + else supplier.getAsDouble() + + // Since: Java 10 + def orElseThrow(): Double = + if (isPresent()) value + else throw new NoSuchElementException() + + def orElseThrow[X <: Throwable](exceptionSupplier: Supplier[_ <: X]): Double = + if (isPresent()) value + else throw exceptionSupplier.get() + + // Since: Java 9 + def stream(): jus.DoubleStream = + if (isPresent()) jus.DoubleStream.of(value) + else jus.DoubleStream.empty() + + override def equals(obj: Any): Boolean = { + obj match { + case opt: OptionalDouble => + (!isPresent() && !opt.isPresent()) || + (isPresent() && opt + 
.isPresent() && Objects.equals(value, opt.getAsDouble())) + case _ => false + } + } + + override def hashCode(): Int = { + if (!isPresent()) 0 + else value.hashCode() + } + + override def toString(): String = { + if (!isPresent()) "Optional.empty" + else s"OptionalDouble[$value]" + } +} + +object OptionalDouble { + def empty(): OptionalDouble = new OptionalDouble(hasValue = false, Double.NaN) + + def of(value: Double): OptionalDouble = { + new OptionalDouble(hasValue = true, value) + } +} diff --git a/javalib/src/main/scala/java/util/OptionalInt.scala b/javalib/src/main/scala/java/util/OptionalInt.scala new file mode 100644 index 0000000000..eabbcb0c76 --- /dev/null +++ b/javalib/src/main/scala/java/util/OptionalInt.scala @@ -0,0 +1,85 @@ +package java.util + +// New work for Scala Native. Based on Scala Native Optional.scala: +// Ported from Scala.js commit SHA1: 9c79cb9 dated: 2022-03-18 + +import java.util.function._ +import java.util.{stream => jus} + +final class OptionalInt private (hasValue: Boolean, value: Int) { + + def getAsInt(): Int = { + if (!isPresent()) + throw new NoSuchElementException() + else + value + } + + @inline def isPresent(): Boolean = hasValue + + // Since: Java 11 + @inline def isEmpty(): Boolean = !hasValue + + def ifPresent(action: IntConsumer): Unit = { + if (isPresent()) + action.accept(value) + } + + // Since: Java 9 + def ifPresentOrElse(action: IntConsumer, emptyAction: Runnable): Unit = { + if (isPresent()) + action.accept(value) + else + emptyAction.run() + } + + def orElse(other: Int): Int = + if (isPresent()) value + else other + + def orElseGet(supplier: IntSupplier): Int = + if (isPresent()) value + else supplier.getAsInt() + + // Since: Java 10 + def orElseThrow(): Int = + if (isPresent()) value + else throw new NoSuchElementException() + + def orElseThrow[X <: Throwable](exceptionSupplier: Supplier[_ <: X]): Int = + if (isPresent()) value + else throw exceptionSupplier.get() + + // Since: Java 9 + def stream(): 
jus.IntStream = + if (isPresent()) jus.IntStream.of(value) + else jus.IntStream.empty() + + override def equals(obj: Any): Boolean = { + obj match { + case opt: OptionalInt => + (!isPresent() && !opt.isPresent()) || + (isPresent() && opt + .isPresent() && Objects.equals(value, opt.getAsInt())) + case _ => false + } + } + + override def hashCode(): Int = { + if (!isPresent()) 0 + else value.hashCode() + } + + override def toString(): String = { + if (!isPresent()) "Optional.empty" + else s"OptionalInt[$value]" + } +} + +object OptionalInt { + def empty(): OptionalInt = new OptionalInt(hasValue = false, 0) + + def of(value: Int): OptionalInt = { + new OptionalInt(hasValue = true, value) + } +} diff --git a/javalib/src/main/scala/java/util/OptionalLong.scala b/javalib/src/main/scala/java/util/OptionalLong.scala new file mode 100644 index 0000000000..575fe44179 --- /dev/null +++ b/javalib/src/main/scala/java/util/OptionalLong.scala @@ -0,0 +1,85 @@ +package java.util + +// New work for Scala Native. 
Based on Scala Native Optional.scala: +// Ported from Scala.js commit SHA1: 9c79cb9 dated: 2022-03-18 + +import java.util.function._ +import java.util.{stream => jus} + +final class OptionalLong private (hasValue: Boolean, value: Long) { + + def getAsLong(): Long = { + if (!isPresent()) + throw new NoSuchElementException() + else + value + } + + @inline def isPresent(): Boolean = hasValue + + // Since: Java 11 + @inline def isEmpty(): Boolean = !hasValue + + def ifPresent(action: LongConsumer): Unit = { + if (isPresent()) + action.accept(value) + } + + // Since: Java 9 + def ifPresentOrElse(action: LongConsumer, emptyAction: Runnable): Unit = { + if (isPresent()) + action.accept(value) + else + emptyAction.run() + } + + def orElse(other: Long): Long = + if (isPresent()) value + else other + + def orElseGet(supplier: LongSupplier): Long = + if (isPresent()) value + else supplier.getAsLong() + + // Since: Java 10 + def orElseThrow(): Long = + if (isPresent()) value + else throw new NoSuchElementException() + + def orElseThrow[X <: Throwable](exceptionSupplier: Supplier[_ <: X]): Long = + if (isPresent()) value + else throw exceptionSupplier.get() + + // Since: Java 9 + def stream(): jus.LongStream = + if (isPresent()) jus.LongStream.of(value) + else jus.LongStream.empty() + + override def equals(obj: Any): Boolean = { + obj match { + case opt: OptionalLong => + (!isPresent() && !opt.isPresent()) || + (isPresent() && opt + .isPresent() && Objects.equals(value, opt.getAsLong())) + case _ => false + } + } + + override def hashCode(): Int = { + if (!isPresent()) 0 + else value.hashCode() + } + + override def toString(): String = { + if (!isPresent()) "Optional.empty" + else s"OptionalLong[$value]" + } +} + +object OptionalLong { + def empty(): OptionalLong = new OptionalLong(hasValue = false, 0L) + + def of(value: Long): OptionalLong = { + new OptionalLong(hasValue = true, value) + } +} diff --git a/javalib/src/main/scala/java/util/regex/Pattern.scala 
b/javalib/src/main/scala/java/util/regex/Pattern.scala index a031fe4954..258315b24f 100644 --- a/javalib/src/main/scala/java/util/regex/Pattern.scala +++ b/javalib/src/main/scala/java/util/regex/Pattern.scala @@ -3,10 +3,9 @@ package regex import scalanative.{regex => snRegex} +import java.util.Arrays import java.util.function.Predicate import java.util.stream.Stream -import java.util.stream.WrappedScalaStream -import scala.scalanative.compat.StreamsCompat._ // Inspired & informed by: // https://github.com/google/re2j/blob/master/java/com/google/re2j/Pattern.java @@ -133,7 +132,9 @@ final class Pattern private[regex] (_regex: String, _flags: Int) { compiled.split(input, limit) def splitAsStream(input: CharSequence): Stream[String] = - new WrappedScalaStream(split(input).toScalaStream, None) + Arrays + .stream(split(input)) + .asInstanceOf[Stream[String]] override def toString: String = _regex } diff --git a/javalib/src/main/scala/java/util/stream/BaseStream.scala b/javalib/src/main/scala/java/util/stream/BaseStream.scala index 6ac783af38..655c7d569e 100644 --- a/javalib/src/main/scala/java/util/stream/BaseStream.scala +++ b/javalib/src/main/scala/java/util/stream/BaseStream.scala @@ -1,15 +1,15 @@ package java.util.stream import java.util.Iterator +import java.util.Spliterator trait BaseStream[+T, +S <: BaseStream[T, S]] extends AutoCloseable { - // def spliterator(): Spliterator[T] - def close(): Unit def isParallel(): Boolean def iterator(): Iterator[_ <: T] def onClose(closeHandler: Runnable): S def parallel(): S def sequential(): S + def spliterator(): Spliterator[_ <: T] def unordered(): S } diff --git a/javalib/src/main/scala/java/util/stream/Collector.scala b/javalib/src/main/scala/java/util/stream/Collector.scala new file mode 100644 index 0000000000..5859f6f324 --- /dev/null +++ b/javalib/src/main/scala/java/util/stream/Collector.scala @@ -0,0 +1,99 @@ +package java.util.stream + +import java.util.{Collections, HashSet, Set} +import java.util.function._ + 
+trait Collector[T, A, R] { + + def accumulator(): BiConsumer[A, T] + + def characteristics(): Set[Collector.Characteristics] + + def combiner(): BinaryOperator[A] + + def finisher(): Function[A, R] + + def supplier(): Supplier[A] +} + +object Collector { + sealed class Characteristics(name: String, ordinal: Int) + extends _Enum[Characteristics](name, ordinal) { + override def toString() = this.name + } + + object Characteristics { + final val CONCURRENT = new Characteristics("CONCURRENT", 0) + final val UNORDERED = new Characteristics("UNORDERED", 1) + final val IDENTITY_FINISH = new Characteristics("IDENTITY_FINISH", 2) + + private[this] val cachedValues = + Array(CONCURRENT, IDENTITY_FINISH, UNORDERED) + + def values(): Array[Characteristics] = cachedValues.clone() + + def valueOf(name: String): Characteristics = { + cachedValues.find(_.name() == name).getOrElse { + throw new IllegalArgumentException( + s"No enum const Collector.Characteristics. ${name}" + ) + } + } + } + + private def createCharacteristicsSet( + addIdentity: Boolean, + ccs: Collector.Characteristics* + ): Set[Collector.Characteristics] = { + val hs = new HashSet[Collector.Characteristics]() + + if (addIdentity) + hs.add(Characteristics.IDENTITY_FINISH) + + for (c <- ccs) + hs.add(c) + + Collections.unmodifiableSet(hs) + } + + def of[T, A, R]( + _supplier: Supplier[A], + _accumulator: BiConsumer[A, T], + _combiner: BinaryOperator[A], + _finisher: Function[A, R], // Note trailing comma + _characteristics: Collector.Characteristics* + ): Collector[T, A, R] = { + new Collector[T, A, R] { + def accumulator(): BiConsumer[A, T] = _accumulator + + def characteristics(): Set[Collector.Characteristics] = + createCharacteristicsSet(false, _characteristics: _*) + + def combiner(): BinaryOperator[A] = _combiner + + def finisher(): Function[A, R] = _finisher + + def supplier(): Supplier[A] = _supplier + } + } + + def of[T, R]( + _supplier: Supplier[R], + _accumulator: BiConsumer[R, T], + _combiner: 
BinaryOperator[R], + _characteristics: Collector.Characteristics* + ): Collector[T, R, R] = { + new Collector[T, R, R] { + def accumulator(): BiConsumer[R, T] = _accumulator + + def characteristics(): Set[Collector.Characteristics] = + createCharacteristicsSet(true, _characteristics: _*) + + def combiner(): BinaryOperator[R] = _combiner + + def finisher(): Function[R, R] = (r: R) => r + + def supplier(): Supplier[R] = _supplier + } + } +} diff --git a/javalib/src/main/scala/java/util/stream/Collectors.scala b/javalib/src/main/scala/java/util/stream/Collectors.scala new file mode 100644 index 0000000000..0c941ce77f --- /dev/null +++ b/javalib/src/main/scala/java/util/stream/Collectors.scala @@ -0,0 +1,1413 @@ +package java.util.stream + +import java.lang.StringBuilder + +import java.util._ + +import java.util.concurrent.{ConcurrentMap, ConcurrentHashMap} + +import java.util.function._ + +import java.util.stream.Collector.Characteristics + +/* Design Notes: + * * This implementation is complete through Java 12, the + * last version with changes to this class. Any missing method is a bug. + * + * * Many methods in this file could have been written entirely using + * lambdas for the arguments to the called Collector. This is + * idiomatic, concise, and elegant. + * + * By design & intent, this file is implemented with a concern for + * corrections and maintenance. In many cases, separate variables are + * used where the equivalent lambda would be complex or more than a line + * or two. + * This makes it easier, for some, to parse the complex call and make + * point edits at the intended place. + * + * When the code is stable and proven, it can be converted to the + * all-lambda style and submitted to the Obfuscated Scala contest. 
+ */ + +object Collectors { + + def averagingDouble[T]( + mapper: ToDoubleFunction[_ >: T] + ): Collector[T, AnyRef, Double] = { + type A = DoubleSummaryStatistics + + Collector + .of[T, A, Double]( + () => new A, + (stats: A, e: T) => stats.accept(mapper.applyAsDouble(e)), + (stats1: A, stats2: A) => { + stats1.combine(stats2) + stats1 + }, + (stats: A) => stats.getAverage() + ) + .asInstanceOf[Collector[T, AnyRef, Double]] + } + + def averagingInt[T]( + mapper: ToIntFunction[_ >: T] + ): Collector[T, AnyRef, Double] = { + type A = IntSummaryStatistics + + Collector + .of[T, A, Double]( + () => new A, + (stats: A, e: T) => stats.accept(mapper.applyAsInt(e)), + (stats1: A, stats2: A) => { + stats1.combine(stats2) + stats1 + }, + (stats: A) => stats.getAverage() + ) + .asInstanceOf[Collector[T, AnyRef, Double]] + } + + def averagingLong[T]( + mapper: ToLongFunction[_ >: T] + ): Collector[T, AnyRef, Double] = { + type A = LongSummaryStatistics + + Collector + .of[T, A, Double]( + () => new A, + (stats: A, e: T) => stats.accept(mapper.applyAsLong(e)), + (stats1: A, stats2: A) => { + stats1.combine(stats2) + stats1 + }, + (stats: A) => stats.getAverage() + ) + .asInstanceOf[Collector[T, AnyRef, Double]] + } + + def collectingAndThen[T, A, R, RR]( + downstream: Collector[T, A, R], + finisher: Function[R, RR] + ): Collector[T, A, RR] = { + + val transformingFinisher = + new Function[A, RR] { + def apply(accum: A): RR = + finisher(downstream.finisher()(accum)) + } + + def removeIdentityFinish( + original: Set[Collector.Characteristics] + ): Set[Collector.Characteristics] = { + val hs = new HashSet[Collector.Characteristics] + + original.forEach(c => + if (c != Collector.Characteristics.IDENTITY_FINISH) + hs.add(c) + ) + + hs + } + + collectorOf[T, A, RR]( + downstream.supplier(), + downstream.accumulator(), + downstream.combiner(), + transformingFinisher, + removeIdentityFinish(downstream.characteristics()) + ) + } + + def counting[T](): Collector[T, AnyRef, Long] = { + 
type A = Array[Long] + + val supplier = new Supplier[A] { + def get(): A = { + val arr = new A(1) + arr(0) = 0L + arr + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + val acc = accum(0) + accum(0) = accum(0) + 1L + } + } + + val combiner = new BinaryOperator[A] { + def apply( + count1: A, + count2: A + ): A = { + count1(0) = count1(0) + count2(0) + count1 + } + } + + Collector + .of[T, Array[Long], Long]( + supplier, + accumulator, + combiner, + (counter: Array[Long]) => counter(0) + ) + .asInstanceOf[Collector[T, AnyRef, Long]] + } + + // Since: Java 9 + def filtering[T, A, R]( + predicate: Predicate[_ >: T], + downstream: Collector[_ >: T, A, R] + ): Collector[T, AnyRef, R] = { + + val dsAccumulator = downstream.accumulator() + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + if (predicate.test(element)) + dsAccumulator.accept(accum, element) + } + } + + collectorOf[T, A, R]( + downstream.supplier(), + accumulator, + downstream.combiner(), + downstream.finisher(), + downstream.characteristics() + ) + .asInstanceOf[Collector[T, AnyRef, R]] + } + + // Since: Java 9 + def flatMapping[T, U, A, R]( + mapper: Function[_ >: T, _ <: Stream[U]], + downstream: Collector[_ >: U, A, R] + ): Collector[T, AnyRef, R] = { + + val dsAccumulator = downstream.accumulator() + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + mapper(element).forEach(e => dsAccumulator.accept(accum, e)) + } + } + + collectorOf[T, A, R]( + downstream.supplier(), + accumulator, + downstream.combiner(), + downstream.finisher(), + downstream.characteristics() + ) + .asInstanceOf[Collector[T, AnyRef, R]] + } + + def groupingBy[T, K]( + classifier: Function[_ >: T, _ <: K] + ): Collector[T, AnyRef, Map[K, List[T]]] = { + type A = HashMap[K, ArrayList[T]] + + val supplier = new Supplier[A] { + def get(): A = new A + } + + val accumulator = new BiConsumer[A, T] { + def 
accept(accum: A, element: T): Unit = { + val key = classifier(element) + accum.compute( + key, + (k, oldValue) => { + val list = + if (oldValue != null) oldValue + else new ArrayList[T]() + list.add(element) + list + } + ) + } + } + + val combiner = new BinaryOperator[A] { + def apply( + map1: A, + map2: A + ): A = { + map1.putAll(map2) + map1 + } + } + + Collector + .of[T, A]( + supplier, + accumulator, + combiner + ) + .asInstanceOf[Collector[T, AnyRef, Map[K, List[T]]]] + } + + def groupingBy[T, K, D, A, M <: Map[K, D]]( + classifier: Function[_ >: T, _ <: K], + mapFactory: Supplier[M], + downstream: Collector[_ >: T, A, D] + ): Collector[T, AnyRef, M] = { + + // The type of the workspace need not be the type A of downstream container + val workspace = new Supplier[HashMap[K, ArrayList[T]]] { + def get(): HashMap[K, ArrayList[T]] = { + new HashMap[K, ArrayList[T]] + } + } + + val accumulator = new BiConsumer[HashMap[K, ArrayList[T]], T] { + def accept(accum: HashMap[K, ArrayList[T]], element: T): Unit = { + val key = classifier(element) + accum.compute( + key, + (k, oldValue) => { + val list = + if (oldValue != null) oldValue + else new ArrayList[T]() + list.add(element) + list + } + ) + } + } + + val combiner = new BinaryOperator[HashMap[K, ArrayList[T]]] { + def apply( + map1: HashMap[K, ArrayList[T]], + map2: HashMap[K, ArrayList[T]] + ): HashMap[K, ArrayList[T]] = { + map1.putAll(map2) + map1 + } + } + + val finisher = + new Function[HashMap[K, ArrayList[T]], M] { + def apply(accum: HashMap[K, ArrayList[T]]): M = { + val resultMap = mapFactory.get() + + accum.forEach((k, v) => { + val reduced = v.stream().collect(downstream) + resultMap.put(k, reduced) + }) + + resultMap + } + } + + Collector + .of[T, HashMap[K, ArrayList[T]], M]( + workspace, + accumulator, + combiner, + finisher + ) + .asInstanceOf[Collector[T, AnyRef, M]] + } + + def groupingBy[T, K, A, D]( + classifier: Function[_ >: T, _ <: K], + downstream: Collector[_ >: T, A, D] + ): Collector[T, 
AnyRef, Map[K, D]] = { + + val supplier = new Supplier[HashMap[K, ArrayList[T]]] { + def get(): HashMap[K, ArrayList[T]] = { + new HashMap[K, ArrayList[T]] + } + } + + val accumulator = new BiConsumer[HashMap[K, ArrayList[T]], T] { + def accept(accum: HashMap[K, ArrayList[T]], element: T): Unit = { + val key = classifier(element) + accum.compute( + key, + (k, oldValue) => { + val list = + if (oldValue != null) oldValue + else new ArrayList[T]() + list.add(element) + list + } + ) + } + } + + val combiner = new BinaryOperator[HashMap[K, ArrayList[T]]] { + def apply( + map1: HashMap[K, ArrayList[T]], + map2: HashMap[K, ArrayList[T]] + ): HashMap[K, ArrayList[T]] = { + map1.putAll(map2) + map1 + } + } + + val finisher = + new Function[HashMap[K, ArrayList[T]], HashMap[K, D]] { + def apply(accum: HashMap[K, ArrayList[T]]): HashMap[K, D] = { + val resultMap = new HashMap[K, D](accum.size()) + + accum.forEach((k, v) => { + val reduced = v.stream().collect(downstream) + resultMap.put(k, reduced) + }) + + resultMap + } + } + + Collector + .of[T, HashMap[K, ArrayList[T]], HashMap[K, D]]( + supplier, + accumulator, + combiner, + finisher + ) + .asInstanceOf[Collector[T, AnyRef, Map[K, D]]] + } + + def groupingByConcurrent[T, K]( + classifier: Function[_ >: T, _ <: K] + ): Collector[T, AnyRef, ConcurrentMap[K, List[T]]] = { + type A = ConcurrentHashMap[K, ArrayList[T]] + + val supplier = new Supplier[A] { + def get(): A = { + new A + } + } + + val accumulator = new BiConsumer[A, T] { + def accept( + accum: A, + element: T + ): Unit = { + val key = classifier(element) + accum.compute( + key, + (k, oldValue) => { + val list = + if (oldValue != null) oldValue + else new ArrayList[T]() + list.add(element) + list + } + ) + } + } + + val combiner = new BinaryOperator[A] { + def apply( + map1: A, + map2: A + ): A = { + map1.putAll(map2) + map1 + } + } + + Collector + .of[T, A]( + supplier, + accumulator, + combiner, + Collector.Characteristics.CONCURRENT, + 
Collector.Characteristics.UNORDERED + // This 4 arg Collector constructor will add IDENTITY_FINISH. + ) + .asInstanceOf[Collector[T, AnyRef, ConcurrentMap[K, List[T]]]] + } + + def groupingByConcurrent[T, K, D, A, M <: ConcurrentMap[K, D]]( + classifier: Function[_ >: T, _ <: K], + mapFactory: Supplier[M], + downstream: Collector[_ >: T, A, D] + ): Collector[T, AnyRef, M] = { + + // The type of the workspace need not be the type A of downstream container + val workspace = new Supplier[ConcurrentHashMap[K, ArrayList[T]]] { + def get(): ConcurrentHashMap[K, ArrayList[T]] = { + new ConcurrentHashMap[K, ArrayList[T]] + } + } + + val accumulator = new BiConsumer[ConcurrentHashMap[K, ArrayList[T]], T] { + def accept( + accum: ConcurrentHashMap[K, ArrayList[T]], + element: T + ): Unit = { + val key = classifier(element) + accum.compute( + key, + (k, oldValue) => { + val list = + if (oldValue != null) oldValue + else new ArrayList[T]() + list.add(element) + list + } + ) + } + } + + val combiner = new BinaryOperator[ConcurrentHashMap[K, ArrayList[T]]] { + def apply( + map1: ConcurrentHashMap[K, ArrayList[T]], + map2: ConcurrentHashMap[K, ArrayList[T]] + ): ConcurrentHashMap[K, ArrayList[T]] = { + map1.putAll(map2) + map1 + } + } + + val finisher = + new Function[ConcurrentHashMap[K, ArrayList[T]], M] { + def apply(accum: ConcurrentHashMap[K, ArrayList[T]]): M = { + val resultMap = mapFactory.get() + + accum.forEach((k, v) => { + val reduced = v.stream().collect(downstream) + resultMap.put(k, reduced) + }) + + resultMap + } + } + + Collector + .of[T, ConcurrentHashMap[K, ArrayList[T]], M]( + workspace, + accumulator, + combiner, + finisher, + Collector.Characteristics.CONCURRENT, + Collector.Characteristics.UNORDERED + ) + .asInstanceOf[Collector[T, AnyRef, M]] + } + + def groupingByConcurrent[T, K, A, D]( + classifier: Function[_ >: T, _ <: K], + downstream: Collector[_ >: T, A, D] + ): Collector[T, AnyRef, ConcurrentMap[K, D]] = { + + // The type of the workspace need not 
be the type A of downstream container + + val supplier = new Supplier[ConcurrentHashMap[K, ArrayList[T]]] { + def get(): ConcurrentHashMap[K, ArrayList[T]] = { + new ConcurrentHashMap[K, ArrayList[T]] + } + } + + val accumulator = new BiConsumer[ConcurrentHashMap[K, ArrayList[T]], T] { + def accept( + accum: ConcurrentHashMap[K, ArrayList[T]], + element: T + ): Unit = { + val key = classifier(element) + accum.compute( + key, + (k, oldValue) => { + val list = + if (oldValue != null) oldValue + else new ArrayList[T]() + list.add(element) + list + } + ) + } + } + + val combiner = new BinaryOperator[ConcurrentHashMap[K, ArrayList[T]]] { + def apply( + map1: ConcurrentHashMap[K, ArrayList[T]], + map2: ConcurrentHashMap[K, ArrayList[T]] + ): ConcurrentHashMap[K, ArrayList[T]] = { + map1.putAll(map2) + map1 + } + } + + val finisher = + new Function[ + ConcurrentHashMap[K, ArrayList[T]], + ConcurrentHashMap[K, D] + ] { + def apply( + accum: ConcurrentHashMap[K, ArrayList[T]] + ): ConcurrentHashMap[K, D] = { + val resultMap = new ConcurrentHashMap[K, D](accum.size()) + + accum.forEach((k, v) => { + val reduced = v.stream().collect(downstream) + resultMap.put(k, reduced) + }) + + resultMap + } + } + + Collector + .of[T, ConcurrentHashMap[K, ArrayList[T]], ConcurrentHashMap[K, D]]( + supplier, + accumulator, + combiner, + finisher, + Collector.Characteristics.CONCURRENT, + Collector.Characteristics.UNORDERED + ) + .asInstanceOf[Collector[T, AnyRef, ConcurrentMap[K, D]]] + } + + def joining(): Collector[CharSequence, AnyRef, String] = + joining("", "", "") + + def joining( + delimiter: CharSequence + ): Collector[CharSequence, AnyRef, String] = + joining(delimiter, "", "") + + def joining( + delimiter: CharSequence, + prefix: CharSequence, + suffix: CharSequence + ): Collector[CharSequence, AnyRef, String] = { + val delimiterLength = delimiter.length() + + val supplier = new Supplier[StringBuilder] { + def get(): StringBuilder = { + val sb = new StringBuilder() + if (prefix != 
"") + sb.append(prefix) + sb + } + } + + val accumulator = new BiConsumer[StringBuilder, CharSequence] { + def accept(accum: StringBuilder, element: CharSequence): Unit = { + val acc = accum.append(element) + if (delimiter != "") + accum.append(delimiter) + } + } + + val combiner = new BinaryOperator[StringBuilder] { + def apply( + sb1: StringBuilder, + sb2: StringBuilder + ): StringBuilder = { + sb1.append(sb2) + } + } + + val finisher = + new Function[StringBuilder, String] { + def apply(accum: StringBuilder): String = { + + if ((accum.length() > prefix.length()) && (delimiterLength > 0)) { + /* This branch means accum has contents beyond a possible prefix. + * If a delimiter arg was is specified, accumlator() will have + * appended that delimiter. A delimiter is unwanted after what is + * now known to be the last item, so trim it off before possibly + * adding a suffix. + */ + val lastIndex = accum.length() - delimiterLength + accum.setLength(lastIndex) // trim off last delimiter sequence. + } + // Else empty stream; no token accepted, hence no delimiter to trim. 
+ + if (suffix != "") + accum.append(suffix) + + accum.toString() + } + } + + Collector + .of[CharSequence, StringBuilder, String]( + supplier, + accumulator, + combiner, + finisher + ) + .asInstanceOf[Collector[CharSequence, AnyRef, String]] + } + + def mapping[T, U, A, R]( + mapper: Function[_ >: T, _ <: U], + downstream: Collector[_ >: U, A, R] + ): Collector[T, AnyRef, R] = { + + val dsAccumulator = downstream.accumulator() + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + dsAccumulator.accept(accum, mapper(element)) + } + } + + collectorOf[T, A, R]( + downstream.supplier(), + accumulator, + downstream.combiner(), + downstream.finisher(), + downstream.characteristics() + ) + .asInstanceOf[Collector[T, AnyRef, R]] + } + + def maxBy[T]( + comparator: Comparator[_ >: T] + ): Collector[T, AnyRef, Optional[T]] = { + type A = Array[Optional[T]] + + val supplier = new Supplier[A] { + def get(): A = { + val arr = new A(1) + arr(0) = Optional.empty[T]() + arr + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + val acc = accum(0) + + if (acc.isEmpty() || (comparator.compare(acc.get(), element) < 0)) + accum(0) = Optional.of(element) + } + } + + Collector + .of[T, A, Optional[T]]( + supplier, + accumulator, + (max1: Array[Optional[T]], max2: Array[Optional[T]]) => + if (!max1(0).isPresent()) max2 + else if (!max2(0).isPresent()) max1 + else if (comparator.compare(max1(0).get(), max2(0).get()) < 0) max2 + else max1, + acc => acc(0) + ) + .asInstanceOf[Collector[T, AnyRef, Optional[T]]] + } + + def minBy[T]( + comparator: Comparator[_ >: T] + ): Collector[T, AnyRef, Optional[T]] = { + type A = Array[Optional[T]] + + val supplier = new Supplier[A] { + def get(): A = { + val arr = new A(1) + arr(0) = Optional.empty[T]() + arr + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + val acc = accum(0) + + if (acc.isEmpty() || 
(comparator.compare(acc.get(), element) > 0)) + accum(0) = Optional.of(element) + } + } + + Collector + .of[T, A, Optional[T]]( + supplier, + accumulator, + (min1: Array[Optional[T]], min2: Array[Optional[T]]) => + if (!min1(0).isPresent()) min2 + else if (!min2(0).isPresent()) min1 + else if (comparator.compare(min1(0).get(), min2(0).get()) > 0) min2 + else min1, + acc => acc(0) + ) + .asInstanceOf[Collector[T, AnyRef, Optional[T]]] + } + + def partitioningBy[T]( + predicate: Predicate[_ >: T] + ): Collector[T, AnyRef, Map[Boolean, List[T]]] = { + type A = HashMap[Boolean, ArrayList[T]] + + val supplier = new Supplier[A] { + def get(): A = { + val map = new A + map.put(false, new ArrayList[T]) + map.put(true, new ArrayList[T]) + map + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + val dst = accum.get(predicate.test(element)) + dst.add(element) + } + } + + Collector + .of[T, A]( + supplier, + accumulator, + ( + map1: HashMap[Boolean, ArrayList[T]], + map2: HashMap[Boolean, ArrayList[T]] + ) => { + map1.putAll(map2) + map1 + } + ) + .asInstanceOf[Collector[T, AnyRef, Map[Boolean, List[T]]]] + } + + def partitioningBy[T, D, A]( + predicate: Predicate[_ >: T], + downstream: Collector[_ >: T, A, D] + ): Collector[T, AnyRef, Map[Boolean, D]] = { + + val supplier = new Supplier[HashMap[Boolean, ArrayList[T]]] { + def get(): HashMap[Boolean, ArrayList[T]] = { + val map = new HashMap[Boolean, ArrayList[T]] + map.put(false, new ArrayList[T]) + map.put(true, new ArrayList[T]) + map + } + } + + val accumulator = new BiConsumer[HashMap[Boolean, ArrayList[T]], T] { + def accept(accum: HashMap[Boolean, ArrayList[T]], element: T): Unit = { + val dst = accum.get(predicate.test(element)) + dst.add(element) + } + } + + val finisher = + new Function[HashMap[Boolean, ArrayList[T]], Map[Boolean, D]] { + def apply(accum: HashMap[Boolean, ArrayList[T]]): Map[Boolean, D] = { + val resultMap = new HashMap[Boolean, D] + + val trueValue = 
accum.get(true).stream().collect(downstream) + resultMap.put(true, trueValue) + + val falseValue = accum.get(false).stream().collect(downstream) + resultMap.put(false, falseValue) + + resultMap.asInstanceOf[Map[Boolean, D]] + } + } + + Collector + .of[T, HashMap[Boolean, ArrayList[T]], Map[Boolean, D]]( + supplier, + accumulator, + ( + map1: HashMap[Boolean, ArrayList[T]], + map2: HashMap[Boolean, ArrayList[T]] + ) => { + map1.putAll(map2) + map1 + }, + finisher + ) + .asInstanceOf[Collector[T, AnyRef, Map[Boolean, D]]] + } + + def reducing[T](op: BinaryOperator[T]): Collector[T, AnyRef, Optional[T]] = { + type A = Array[Optional[T]] + + val supplier = new Supplier[A] { + def get(): A = { + val arr = new A(1) + arr(0) = Optional.empty[T]() + arr + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + val acc = accum(0) + + accum(0) = + if (acc.isEmpty()) Optional.of(element) + else Optional.of(op.apply(acc.get(), element)) + } + } + + Collector + .of[T, A, Optional[T]]( + supplier, + accumulator, + (arr1: Array[Optional[T]], arr2: Array[Optional[T]]) => + if (!arr1(0).isPresent()) arr2 + else if (!arr2(0).isPresent()) arr1 + else { + val result = new Array[Optional[T]](1) + result(0) = Optional.of(op.apply(arr1(0).get(), arr2(0).get())) + result + }, + acc => acc(0) + ) + .asInstanceOf[Collector[T, AnyRef, Optional[T]]] + } + + def reducing[T]( + identity: T, + op: BinaryOperator[T] + ): Collector[T, AnyRef, T] = { + type A = Array[T] + + val supplier = new Supplier[A] { + def get(): A = { + val arr = new Array[Object](1).asInstanceOf[A] + arr(0) = identity + arr + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + val acc = accum(0) + + accum(0) = op.apply(acc, element) + } + } + + Collector + .of[T, A, T]( + supplier, + accumulator, + (arr1: Array[T], arr2: Array[T]) => { + val result = new Array[Object](1).asInstanceOf[Array[T]] + result(0) = op.apply(arr1(0), arr2(0)) + 
result + }, + acc => acc(0) + ) + .asInstanceOf[Collector[T, AnyRef, T]] + } + + def reducing[T, U]( + identity: U, + mapper: Function[_ >: T, _ <: U], + op: BinaryOperator[U] + ): Collector[T, AnyRef, U] = { + type A = Array[U] + + val supplier = new Supplier[A] { + def get(): A = { + val arr = new Array[Object](1).asInstanceOf[A] + arr(0) = identity + arr + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + val acc = accum(0) + + accum(0) = op.apply(acc, mapper(element)) + } + } + + Collector + .of[T, A, U]( + supplier, + accumulator, + (arr1: Array[U], arr2: Array[U]) => { + val result = new Array[Object](1).asInstanceOf[Array[U]] + result(0) = op.apply(arr1(0), arr2(0)) + result + }, + acc => acc(0) + ) + .asInstanceOf[Collector[T, AnyRef, U]] + } + + def summarizingDouble[T]( + mapper: ToDoubleFunction[_ >: T] + ): Collector[T, AnyRef, DoubleSummaryStatistics] = { + type A = DoubleSummaryStatistics + + Collector + .of[T, A]( + () => new A, + (stats: A, e: T) => stats.accept(mapper.applyAsDouble(e)), + (stats1: A, stats2: A) => { + stats1.combine(stats2) + stats1 + } + ) + .asInstanceOf[Collector[T, AnyRef, DoubleSummaryStatistics]] + } + + def summarizingInt[T]( + mapper: ToIntFunction[_ >: T] + ): Collector[T, AnyRef, IntSummaryStatistics] = { + type A = IntSummaryStatistics + + Collector + .of[T, A]( + () => new A, + (stats: A, e: T) => stats.accept(mapper.applyAsInt(e)), + (stats1: A, stats2: A) => { + stats1.combine(stats2) + stats1 + } + ) + .asInstanceOf[Collector[T, AnyRef, IntSummaryStatistics]] + } + + def summarizingLong[T]( + mapper: ToLongFunction[_ >: T] + ): Collector[T, AnyRef, LongSummaryStatistics] = { + type A = LongSummaryStatistics + + Collector + .of[T, A]( + () => new A, + (stats: A, e: T) => stats.accept(mapper.applyAsLong(e)), + (stats1: A, stats2: A) => { + stats1.combine(stats2) + stats1 + } + ) + .asInstanceOf[Collector[T, AnyRef, LongSummaryStatistics]] + } + + def summingDouble[T]( + 
mapper: ToDoubleFunction[_ >: T] + ): Collector[T, AnyRef, Double] = { + type A = Array[Double] + + val supplier = new Supplier[A] { + def get(): A = { + val arr = new A(1) + arr(0) = 0.0 + arr + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + accum(0) = accum(0) + mapper.applyAsDouble(element) + } + } + + val combiner = new BinaryOperator[A] { + def apply(arr1: A, arr2: A): A = { + arr1(0) = arr1(0) + arr2(0) + arr1 + } + } + + Collector + .of[T, A, Double]( + supplier, + accumulator, + combiner, + (accum: A) => accum(0) + ) + .asInstanceOf[Collector[T, AnyRef, Double]] + } + + def summingInt[T]( + mapper: ToIntFunction[_ >: T] + ): Collector[T, AnyRef, Int] = { + type A = Array[Int] + + val supplier = new Supplier[A] { + def get(): A = { + val arr = new A(1) + arr(0) = 0 + arr + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + accum(0) = accum(0) + mapper.applyAsInt(element) + } + } + + val combiner = new BinaryOperator[A] { + def apply(arr1: A, arr2: A): A = { + arr1(0) = arr1(0) + arr2(0) + arr1 + } + } + + Collector + .of[T, A, Int]( + supplier, + accumulator, + combiner, + (accum: A) => accum(0) + ) + .asInstanceOf[Collector[T, AnyRef, Int]] + } + + def summingLong[T]( + mapper: ToLongFunction[_ >: T] + ): Collector[T, AnyRef, Long] = { + type A = Array[Long] + + val supplier = new Supplier[A] { + def get(): A = { + val arr = new A(1) + arr(0) = 0L + arr + } + } + + val accumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + accum(0) = accum(0) + mapper.applyAsLong(element) + } + } + + val combiner = new BinaryOperator[A] { + def apply(arr1: A, arr2: A): A = { + arr1(0) = arr1(0) + arr2(0) + arr1 + } + } + + Collector + .of[T, A, Long]( + supplier, + accumulator, + combiner, + (accum: A) => accum(0) + ) + .asInstanceOf[Collector[T, AnyRef, Long]] + } + + def teeing[T, R1, R2, R]( + downstream1: Collector[T, AnyRef, R1], + downstream2: 
Collector[T, AnyRef, R2], + merger: BiFunction[_ >: R1, _ >: R2, R] + ): Collector[T, AnyRef, R] = { + type A = Tuple2[AnyRef, AnyRef] + + val ds1Accumulator = downstream1.accumulator() // capture type1 + val ds2Accumulator = downstream2.accumulator() // capture type2 + + val lclSupplier = new Supplier[A] { + def get(): A = { + new A( + downstream1.supplier().get(), + downstream2.supplier().get() + ) + } + } + + val lclAccumulator = new BiConsumer[A, T] { + def accept(accum: A, element: T): Unit = { + ds1Accumulator.accept(accum._1, element) + ds2Accumulator.accept(accum._2, element) + } + } + + def determineCharacteristics( + set1: Set[Collector.Characteristics], + set2: Set[Collector.Characteristics] + ): Set[Collector.Characteristics] = { + + val hs = new HashSet[Collector.Characteristics] + + // The calling method uses a finisher(), so no IDENTITY_FINISH here. + + if (set1.contains(Collector.Characteristics.UNORDERED) + && set2.contains(Collector.Characteristics.UNORDERED)) + hs.add(Collector.Characteristics.UNORDERED) + + if (set1.contains(Collector.Characteristics.CONCURRENT) + && set2.contains(Collector.Characteristics.CONCURRENT)) + hs.add(Collector.Characteristics.CONCURRENT) + + hs + } + + val lclCombiner = new BinaryOperator[A] { + def apply(accum1: A, accum2: A): A = { + Tuple2( + downstream1.combiner()(accum1._1, accum2._1), + downstream2.combiner()(accum2._1, accum2._2) + ) + } + } + + val lclFinisher = + new Function[A, R] { + def apply(accum: A): R = { + merger( + downstream1.finisher()(accum._1), + downstream2.finisher()(accum._2) + ) + } + } + + collectorOf[T, A, R]( + lclSupplier, + lclAccumulator, + lclCombiner, + lclFinisher, + determineCharacteristics( + downstream1.characteristics(), + downstream2.characteristics() + ) + ) + .asInstanceOf[Collector[T, AnyRef, R]] + } + + def toCollection[T, C <: Collection[T]]( + collectionFactory: Supplier[C] + ): Collector[T, AnyRef, C] = { + + Collector + .of[T, C]( + collectionFactory, + (col: C, e: T) => 
col.add(e), + (col1: C, col2: C) => { + col1.addAll(col2) + col1 + } + ) + .asInstanceOf[Collector[T, AnyRef, C]] + } + + def toConcurrentMap[T, K, U]( + keyMapper: Function[_ >: T, _ <: K], + valueMapper: Function[_ >: T, _ <: U] + ): Collector[T, AnyRef, ConcurrentMap[K, U]] = { + type A = ConcurrentHashMap[K, U] + + Collector + .of[T, A]( + () => new A, + (map: A, e: T) => map.put(keyMapper(e), valueMapper(e)), + (map1: A, map2: A) => { + map1.putAll(map2) + map1 + }, + Collector.Characteristics.CONCURRENT, + Collector.Characteristics.UNORDERED + // This 4 arg Collector constructor will add IDENTITY_FINISH. + ) + .asInstanceOf[Collector[T, AnyRef, ConcurrentMap[K, U]]] + } + + def toConcurrentMap[T, K, U]( + keyMapper: Function[_ >: T, _ <: K], + valueMapper: Function[_ >: T, _ <: U], + mergeFunction: BinaryOperator[U] + ): Collector[T, AnyRef, ConcurrentMap[K, U]] = { + type A = ConcurrentHashMap[K, U] + + Collector + .of[T, A]( + () => new A, + ( + map: A, + e: T + ) => map.merge(keyMapper(e), valueMapper(e), mergeFunction), + (map1: A, map2: A) => { + map1.putAll(map2) + map1 + }, + Collector.Characteristics.CONCURRENT, + Collector.Characteristics.UNORDERED + // This 4 arg Collector constructor will add IDENTITY_FINISH. + ) + .asInstanceOf[Collector[T, AnyRef, ConcurrentMap[K, U]]] + } + + def toConcurrentMap[T, K, U, M <: ConcurrentMap[K, U]]( + keyMapper: Function[_ >: T, _ <: K], + valueMapper: Function[_ >: T, _ <: U], + mergeFunction: BinaryOperator[U], + mapFactory: Supplier[M] + ): Collector[T, AnyRef, M] = { + Collector + .of[T, M]( + () => mapFactory.get(), + ( + map: M, + e: T + ) => map.merge(keyMapper(e), valueMapper(e), mergeFunction), + (map1: M, map2: M) => { + map1.putAll(map2) + map1 + }, + Collector.Characteristics.CONCURRENT, + Collector.Characteristics.UNORDERED + // This 4 arg Collector constructor will add IDENTITY_FINISH. 
+ ) + .asInstanceOf[Collector[T, AnyRef, M]] + } + + def toList[T](): Collector[T, AnyRef, List[T]] = { + type A = ArrayList[T] + + Collector + .of[T, A]( + () => new A, + (list: A, e: T) => list.add(e), + (list1: A, list2: A) => { + list1.addAll(list2) + list1 + } + ) + .asInstanceOf[Collector[T, AnyRef, List[T]]] + } + + def toMap[T, K, U]( + keyMapper: Function[_ >: T, _ <: K], + valueMapper: Function[_ >: T, _ <: U] + ): Collector[T, AnyRef, Map[K, U]] = { + type A = HashMap[K, U] + + Collector + .of[T, A]( + () => new A, + (map: A, e: T) => map.put(keyMapper(e), valueMapper(e)), + (map1: A, map2: A) => { + map1.putAll(map2) + map1 + } + ) + .asInstanceOf[Collector[T, AnyRef, Map[K, U]]] + } + + def toMap[T, K, U]( + keyMapper: Function[_ >: T, _ <: K], + valueMapper: Function[_ >: T, _ <: U], + mergeFunction: BinaryOperator[U] + ): Collector[T, AnyRef, Map[K, U]] = { + type A = HashMap[K, U] + + Collector + .of[T, A]( + () => new A, + ( + map: A, + e: T + ) => map.merge(keyMapper(e), valueMapper(e), mergeFunction), + (map1: A, map2: A) => { + map1.putAll(map2) + map1 + } + ) + .asInstanceOf[Collector[T, AnyRef, Map[K, U]]] + } + + def toMap[T, K, U, M <: Map[K, U]]( + keyMapper: Function[_ >: T, _ <: K], + valueMapper: Function[_ >: T, _ <: U], + mergeFunction: BinaryOperator[U], + mapFactory: Supplier[M] + ): Collector[T, AnyRef, M] = { + + Collector + .of[T, M]( + () => mapFactory.get(), + ( + map: M, + e: T + ) => map.merge(keyMapper(e), valueMapper(e), mergeFunction), + (map1: M, map2: M) => { + map1.putAll(map2) + map1 + } + ) + .asInstanceOf[Collector[T, AnyRef, M]] + } + + def toSet[T](): Collector[T, AnyRef, Set[T]] = { + type A = HashSet[T] + + Collector + .of[T, A]( + () => new A, + (set: A, e: T) => set.add(e), + (set1: A, set2: A) => { + set1.addAll(set2) + set1 + }, + Collector.Characteristics.UNORDERED + // This 4 arg Collector constructor will add IDENTITY_FINISH. 
+ ) + .asInstanceOf[Collector[T, AnyRef, Set[T]]] + } + + // Since: Java 10 + def toUnmodifiableList[T](): Collector[T, AnyRef, List[T]] = { + Collectors.collectingAndThen[T, AnyRef, List[T], List[T]]( + Collectors.toList[T](), + (e: List[T]) => Collections.unmodifiableList[T](e) + ) + } + + // Since: Java 10 + def toUnmodifiableMap[T, K, U]( + keyMapper: Function[_ >: T, _ <: K], + valueMapper: Function[_ >: T, _ <: U] + ): Collector[T, AnyRef, Map[K, U]] = { + Collectors.collectingAndThen( + Collectors.toMap[T, K, U](keyMapper, valueMapper), + (e: Map[K, U]) => Collections.unmodifiableMap(e) + ) + } + + // Since: Java 10 + def toUnmodifiableMap[T, K, U]( + keyMapper: Function[_ >: T, _ <: K], + valueMapper: Function[_ >: T, _ <: U], + mergeFunction: BinaryOperator[U] + ): Collector[T, AnyRef, Map[K, U]] = { + Collectors.collectingAndThen( + Collectors.toMap[T, K, U](keyMapper, valueMapper, mergeFunction), + (e: Map[K, U]) => Collections.unmodifiableMap(e) + ) + } + + // Since: Java 10 + def toUnmodifiableSet[T](): Collector[T, AnyRef, Set[T]] = { + Collectors.collectingAndThen( + Collectors.toSet[T](), + (e: Set[T]) => Collections.unmodifiableSet(e) + ) + } + + private def collectorOf[T, A, R]( + _supplier: Supplier[A], + _accumulator: BiConsumer[A, T], + _combiner: BinaryOperator[A], + _finisher: Function[A, R], + _characteristics: Set[Collector.Characteristics] + ): Collector[T, A, R] = { + new Collector[T, A, R] { + def accumulator(): BiConsumer[A, T] = _accumulator + + def characteristics(): Set[Collector.Characteristics] = _characteristics + + def combiner(): BinaryOperator[A] = _combiner + + def finisher(): Function[A, R] = _finisher + + def supplier(): Supplier[A] = _supplier + } + } + +} diff --git a/javalib/src/main/scala/java/util/stream/DoubleStream.scala b/javalib/src/main/scala/java/util/stream/DoubleStream.scala new file mode 100644 index 0000000000..f473f6a0ca --- /dev/null +++ b/javalib/src/main/scala/java/util/stream/DoubleStream.scala @@ -0,0 
+1,328 @@ +package java.util.stream + +import java.util._ +import java.util.function._ + +trait DoubleStream extends BaseStream[Double, DoubleStream] { + + def allMatch(pred: DoublePredicate): Boolean + + def anyMatch(pred: DoublePredicate): Boolean + + def average(): OptionalDouble + + def boxed(): Stream[Double] + + def collect[R]( + supplier: Supplier[R], + accumulator: ObjDoubleConsumer[R], + combiner: BiConsumer[R, R] + ): R + + def count(): Long + + def distinct(): DoubleStream + + // Since: Java 9 + def dropWhile(pred: DoublePredicate): DoubleStream = { + Objects.requireNonNull(pred) + + val spliter = this.spliterator() // also marks this stream "operated upon" + + // JVM appears to use an unsized iterator for dropWhile() + // May need to adjust other characteristics. + val unSized = spliter.characteristics() & + ~(Spliterator.SIZED | Spliterator.SUBSIZED) + + val spl = new Spliterators.AbstractDoubleSpliterator( + Long.MaxValue, + unSized + ) { + + override def trySplit(): Spliterator.OfDouble = + null.asInstanceOf[Spliterator.OfDouble] + + var doneDropping = false + + def tryAdvance(action: DoubleConsumer): Boolean = { + if (doneDropping) { + spliter.tryAdvance((e) => action.accept(e)) + } else { + var doneLooping = false + while (!doneLooping) { + val advanced = + spliter.tryAdvance((e) => { + if (!pred.test(e)) { + action.accept(e) + doneDropping = true + doneLooping = true + } + + }) + if (!advanced) + doneLooping = true + } + doneDropping // true iff some element was accepted + } + } + } + + new DoubleStreamImpl(spl, parallel = false, parent = this) + } + + def filter(pred: DoublePredicate): DoubleStream + + def findAny(): OptionalDouble + + def findFirst(): OptionalDouble + + def flatMap(mapper: DoubleFunction[_ <: DoubleStream]): DoubleStream + + def forEach(action: DoubleConsumer): Unit + + def forEachOrdered(action: DoubleConsumer): Unit + + def limit(maxSize: Long): DoubleStream + + def map(mapper: DoubleUnaryOperator): DoubleStream + + // Since: 
Java 16 + def mapMulti(mapper: DoubleStream.DoubleMapMultiConsumer): DoubleStream = { + + /* Design Note: + * This implementation differs from the reference default implementation + * described in the Java Stream#mapMulti documentation. + * + * That implementation is basically: + * this.flatMap(e => { + * val buffer = new ArrayList[R]() + * mapper.accept(e, r => buffer.add(r)) + * buffer.stream() + * }) + * + * It offers few of the benefits described for the multiMap method: + * reduced number of streams created, runtime efficiency, etc. + * + * This implementation should actually provide the benefits of mapMulti(). + */ + + val spliter = this.spliterator() // also marks this stream "operated upon" + + val buffer = new ArrayDeque[Double]() + + // Can not predict replacements, so Spliterator can not be SIZED. + // May need to adjust other characteristics. + val unSized = spliter.characteristics() & + ~(Spliterator.SIZED | Spliterator.SUBSIZED) + + val spl = + new Spliterators.AbstractDoubleSpliterator(Long.MaxValue, unSized) { + + def tryAdvance(action: DoubleConsumer): Boolean = { + var advanced = false + + var done = false + while (!done) { + if (buffer.size() == 0) { + val stepped = + spliter.tryAdvance((e: Double) => + mapper.accept(e, r => buffer.add(r)) + ) + done = !stepped + } else { + action.accept(buffer.removeFirst()) + advanced = true + done = true + } + } + + advanced + } + } + + new DoubleStreamImpl( + spl, + parallel = false, + parent = this.asInstanceOf[DoubleStream] + ) + } + + def mapToInt(mapper: DoubleToIntFunction): IntStream + + def mapToLong(mapper: DoubleToLongFunction): LongStream + + def mapToObj[U](mapper: DoubleFunction[_ <: U]): Stream[U] + + def max(): OptionalDouble + + def min(): OptionalDouble + + def noneMatch(pred: DoublePredicate): Boolean + + def peek(action: DoubleConsumer): DoubleStream + + def reduce(identity: Double, op: DoubleBinaryOperator): Double + + def reduce(op: DoubleBinaryOperator): OptionalDouble + + def skip(n: 
Long): DoubleStream + + def sorted(): DoubleStream + + def sum(): Double + + def summaryStatistics(): DoubleSummaryStatistics + + // Since: Java 9 + def takeWhile(pred: DoublePredicate): DoubleStream = { + Objects.requireNonNull(pred) + + val spliter = this.spliterator() // also marks this stream "operated upon" + + // JVM appears to use an unsized iterator for takeWhile() + // May need to adjust other characteristics. + val unSized = spliter.characteristics() & + ~(Spliterator.SIZED | Spliterator.SUBSIZED) + + val spl = new Spliterators.AbstractDoubleSpliterator( + Long.MaxValue, + unSized + ) { + var done = false // short-circuit + + override def trySplit(): Spliterator.OfDouble = + null.asInstanceOf[Spliterator.OfDouble] + + def tryAdvance(action: DoubleConsumer): Boolean = { + if (done) false + else + spliter.tryAdvance((e) => + if (!pred.test(e)) done = true + else action.accept(e) + ) + } + } + + new DoubleStreamImpl(spl, parallel = false, parent = this) + } + + def toArray(): Array[Double] + +} + +object DoubleStream { + + trait Builder extends DoubleConsumer { + def accept(t: Double): Unit + def add(t: Double): DoubleStream.Builder = { + accept(t) + this + } + def build(): DoubleStream + } + + @FunctionalInterface + trait DoubleMapMultiConsumer { + def accept(value: Double, dc: DoubleConsumer): Unit + } + + def builder(): DoubleStream.Builder = + new DoubleStreamImpl.Builder + + def concat(a: DoubleStream, b: DoubleStream): DoubleStream = + DoubleStreamImpl.concat(a, b) + + def empty(): DoubleStream = + new DoubleStreamImpl( + Spliterators.emptyDoubleSpliterator(), + parallel = false + ) + + def generate(s: DoubleSupplier): DoubleStream = { + val spliter = + new Spliterators.AbstractDoubleSpliterator(Long.MaxValue, 0) { + def tryAdvance(action: DoubleConsumer): Boolean = { + action.accept(s.getAsDouble()) + true + } + } + + new DoubleStreamImpl(spliter, parallel = false) + } + + // Since: Java 9 + def iterate( + seed: Double, + hasNext: DoublePredicate, + 
next: DoubleUnaryOperator + ): DoubleStream = { + // "seed" on RHS here is to keep compiler happy with local var init + var previous = seed + var seedUsed = false + + val spliter = + new Spliterators.AbstractDoubleSpliterator(Long.MaxValue, 0) { + def tryAdvance(action: DoubleConsumer): Boolean = { + val current = + if (seedUsed) next.applyAsDouble(previous) + else { + seedUsed = true + seed + } + + val advanceOK = hasNext.test(current) + if (advanceOK) { + action.accept(current) + previous = current + } + advanceOK + } + } + + new DoubleStreamImpl(spliter, parallel = false) + } + + def iterate( + seed: Double, + f: DoubleUnaryOperator + ): DoubleStream = { + var previous = seed // "seed" here is just to keep compiler happy. + var seedUsed = false + + val spliter = + new Spliterators.AbstractDoubleSpliterator(Long.MaxValue, 0) { + def tryAdvance(action: DoubleConsumer): Boolean = { + val current = + if (seedUsed) f.applyAsDouble(previous) + else { + seedUsed = true + seed + } + + action.accept(current) + previous = current + true + } + } + + new DoubleStreamImpl(spliter, parallel = false) + } + + def of(values: Array[Double]): DoubleStream = { + /* One would expect variable arguments to be declared as + * "values: Double*" here. + * However, that causes "symbol not found" errors at OS link time. + * An implicit conversion must be missing in the javalib environment. 
+ */ + + val bldr = DoubleStream.builder() + for (j <- values) + bldr.add(j) + + bldr.build() + } + + def of(t: Double): DoubleStream = + DoubleStream.builder().add(t).build() + +} diff --git a/javalib/src/main/scala/java/util/stream/DoubleStreamImpl.scala b/javalib/src/main/scala/java/util/stream/DoubleStreamImpl.scala new file mode 100644 index 0000000000..cccb258083 --- /dev/null +++ b/javalib/src/main/scala/java/util/stream/DoubleStreamImpl.scala @@ -0,0 +1,827 @@ +package java.util.stream + +import java.{lang => jl} +import java.{util => ju} +import java.util._ +import java.util.function._ + +private[stream] class DoubleStreamImpl( + val pipeline: ArrayDeque[DoubleStreamImpl] +) extends DoubleStream { + var _spliterArg: Spliterator.OfDouble = _ + var _supplier: Supplier[Spliterator.OfDouble] = _ + var _parallel: Boolean = _ // Scaffolding for later improvements. + var _characteristics: Int = 0 + + lazy val _spliter: Spliterator.OfDouble = + if (_spliterArg != null) _spliterArg + else _supplier.get() + + var _operatedUpon: Boolean = false + var _closed: Boolean = false + + // avoid allocating an onCloseQueue just to check if it is empty. 
+ var onCloseQueueActive = false + lazy val onCloseQueue = new ArrayDeque[Runnable]() + + pipeline.addLast(this) + + def this( + spliterator: Spliterator.OfDouble, + parallel: Boolean + ) = { + this(new ArrayDeque[DoubleStreamImpl]) + _spliterArg = spliterator + _parallel = parallel + } + + def this( + spliterator: Spliterator.OfDouble, + parallel: Boolean, + parent: DoubleStream + ) = { + this(parent.asInstanceOf[DoubleStreamImpl].pipeline) + _spliterArg = spliterator + _parallel = parallel + } + + def this( + spliterator: Spliterator.OfDouble, + parallel: Boolean, + pipeline: ArrayDeque[DoubleStreamImpl] + ) = { + this(pipeline) + _spliterArg = spliterator + _parallel = parallel + } + + def this( + supplier: Supplier[Spliterator.OfDouble], + characteristics: Int, + parallel: Boolean + ) = { + this(new ArrayDeque[DoubleStreamImpl]) + _supplier = supplier + _parallel = parallel + _characteristics = characteristics + } + +// Methods specified in interface BaseStream ---------------------------- + + /* Throw IllegalStateException() if an attempt is made to operate + * on a stream a second time or after it has been closed. + * The JVM throws on most but not all "second" intermediate or terminal + * stream operations. The intent is that Scala Native match that set. 
+ */ + + protected def commenceOperation(): Unit = { + if (_operatedUpon || _closed) + ObjectStreamImpl.throwIllegalStateException() + + _operatedUpon = true + } + + def close(): Unit = { + if (!_closed) { + val exceptionBuffer = new DoubleStreamImpl.CloseExceptionBuffer() + val it = pipeline.iterator() + + while (it.hasNext()) { + try { + it.next().closeStage() + } catch { + case e: Exception => exceptionBuffer.add(e) + } + } + + exceptionBuffer.reportExceptions() + } + } + + private def closeStage(): Unit = { + _closed = true + + val exceptionBuffer = new DoubleStreamImpl.CloseExceptionBuffer() + + if (onCloseQueueActive) { + val it = onCloseQueue.iterator() + while (it.hasNext()) { + try { + it.next().run() + } catch { + case e: Exception => exceptionBuffer.add(e) + } + } + } + + exceptionBuffer.reportExceptions() + } + + def isParallel(): Boolean = false + + def iterator(): ju.PrimitiveIterator.OfDouble = { + commenceOperation() + Spliterators.iterator(_spliter) + } + + def onClose(closeHandler: Runnable): DoubleStream = { + // JVM appears to not set "operated upon" here. + + if (_closed) + ObjectStreamImpl.throwIllegalStateException() + + // detects & throws on closeHandler == null + onCloseQueue.addLast(closeHandler) + + if (!onCloseQueueActive) + onCloseQueueActive = true + + this + } + + // parallel is not yet implemented. + def parallel(): DoubleStreamImpl = this + + def sequential(): DoubleStreamImpl = this + + def spliterator(): ju.Spliterator[_ <: Double] = { + commenceOperation() + _spliter.asInstanceOf[ju.Spliterator[_ <: Double]] + } + + def unordered(): DoubleStream = { + /* JVM has an unenforced requirement that a stream and its spliterator + * (can you say Harlan Ellison?) should have the same characteristics. 
+ */ + + val masked = _spliter.characteristics() & Spliterator.ORDERED + + if (masked == Spliterator.ORDERED) this + else { + commenceOperation() + + // Clear ORDERED + val unordered = _spliter.characteristics() & ~(Spliterator.ORDERED) + + val spl = new Spliterators.AbstractDoubleSpliterator( + _spliter.estimateSize(), + unordered + ) { + def tryAdvance(action: DoubleConsumer): Boolean = + _spliter.tryAdvance((e: Double) => action.accept(e)) + } + + new DoubleStreamImpl(spl, _parallel, pipeline) + } + } + +// Methods specified in interface Stream -------------------------------- + + def allMatch(pred: DoublePredicate): Boolean = { + commenceOperation() + + // Be careful with documented "true" return for empty stream. + var mismatchFound = false + + while (!mismatchFound && + _spliter.tryAdvance((e: Double) => + if (!pred.test(e)) + mismatchFound = true + )) { /* search */ } + !mismatchFound + } + + def anyMatch(pred: DoublePredicate): Boolean = { + commenceOperation() + + var matchFound = false + + while (!matchFound && + _spliter.tryAdvance((e: Double) => + if (pred.test(e)) + matchFound = true + )) { /* search */ } + matchFound + } + + def average(): OptionalDouble = { + commenceOperation() + + var count = 0 + var sum = 0.0 + + _spliter.forEachRemaining((d: Double) => { count += 1; sum += d }) + if (count == 0) OptionalDouble.empty() + else OptionalDouble.of(sum / count) + } + + def boxed(): Stream[Double] = + this.mapToObj[Double](d => d) + + def collect[R]( + supplier: Supplier[R], + accumulator: ObjDoubleConsumer[R], + combiner: BiConsumer[R, R] + ): R = { + commenceOperation() + + val result = supplier.get() + + _spliter.forEachRemaining((e: Double) => accumulator.accept(result, e)) + + result + } + + def count(): Long = { + commenceOperation() + + var count = 0L + _spliter.forEachRemaining((d: Double) => count += 1) + count + } + + def distinct(): DoubleStream = { + commenceOperation() + + val seenElements = new ju.HashSet[Double]() + + // Some items may be 
dropped, so the estimated size is a high bound. + val estimatedSize = _spliter.estimateSize() + + val spl = + new Spliterators.AbstractDoubleSpliterator( + estimatedSize, + _spliter.characteristics() + ) { + def tryAdvance(action: DoubleConsumer): Boolean = { + var success = false + var done = false + while (!done) { + var advanced = + _spliter.tryAdvance((e: Double) => { + val added = seenElements.add(e) + + if (added) { + action.accept(e) + done = true + success = true + } + }) + if (!advanced) + done = true + } + success + } + } + + new DoubleStreamImpl(spl, _parallel, pipeline) + } + + def filter(pred: DoublePredicate): DoubleStream = { + commenceOperation() + + // Some items may be filtered out, so the estimated size is a high bound. + val estimatedSize = _spliter.estimateSize() + + val spl = new Spliterators.AbstractDoubleSpliterator( + estimatedSize, + _spliter.characteristics() + ) { + def tryAdvance(action: DoubleConsumer): Boolean = { + var success = false + var done = false + while (!done) { + var advanced = + _spliter.tryAdvance((e: Double) => { + if (pred.test(e)) { + action.accept(e) + done = true + success = true + } + }) + + if (!advanced) + done = true + } + success + } + } + + new DoubleStreamImpl(spl, _parallel, pipeline) + } + + /* delegating to findFirst() is an implementation ~~hack~~ expediency. + * Probably near-optimal for sequential streams. Parallel streams may + * offer better possibilities. + */ + def findAny(): OptionalDouble = { + // commenceOperation() // findFirst will call, so do not do twice. 
+ findFirst() + } + + def findFirst(): OptionalDouble = { + commenceOperation() + var optional = OptionalDouble.empty() + _spliter.tryAdvance((e: Double) => { optional = OptionalDouble.of(e) }) + optional + } + + def flatMap( + mapper: DoubleFunction[_ <: DoubleStream] + ): DoubleStream = { + commenceOperation() + + val supplier = + new DoubleStreamImpl.DoublePrimitiveCompoundSpliteratorFactory( + _spliter, + mapper, + closeOnFirstTouch = true + ) + + val coercedPriorStages = pipeline + .asInstanceOf[ArrayDeque[DoubleStreamImpl]] + + new DoubleStreamImpl(supplier.get(), _parallel, coercedPriorStages) + } + + def forEach(action: DoubleConsumer): Unit = { + commenceOperation() + _spliter.forEachRemaining(action) + } + + def forEachOrdered(action: DoubleConsumer): Unit = { + commenceOperation() + _spliter.forEachRemaining(action) + } + + def limit(maxSize: Long): DoubleStream = { + if (maxSize < 0) + throw new IllegalArgumentException(maxSize.toString()) + + commenceOperation() // JVM tests argument before operatedUpon or closed. 
+ + var nSeen = 0L + + val spl = new Spliterators.AbstractDoubleSpliterator( + maxSize, + _spliter.characteristics() + ) { + def tryAdvance(action: DoubleConsumer): Boolean = + if (nSeen >= maxSize) false + else { + var advanced = + _spliter.tryAdvance((e: Double) => action.accept(e)) + nSeen = + if (advanced) nSeen + 1 + else Long.MaxValue + + advanced + } + } + + new DoubleStreamImpl(spl, _parallel, pipeline) + } + + def map( + mapper: DoubleUnaryOperator + ): DoubleStream = { + commenceOperation() + + val spl = new Spliterators.AbstractDoubleSpliterator( + _spliter.estimateSize(), + _spliter.characteristics() + ) { + def tryAdvance(action: DoubleConsumer): Boolean = + _spliter.tryAdvance((e: Double) => + action.accept(mapper.applyAsDouble(e)) + ) + } + + new DoubleStreamImpl(spl, _parallel, pipeline) + } + + def mapToInt(mapper: DoubleToIntFunction): IntStream = + throw new UnsupportedOperationException("Not Yet Implemented") + + def mapToLong(mapper: DoubleToLongFunction): LongStream = + throw new UnsupportedOperationException("Not Yet Implemented") + + def mapToObj[U](mapper: DoubleFunction[_ <: U]): Stream[U] = { + + val spl = new Spliterators.AbstractSpliterator[U]( + _spliter.estimateSize(), + _spliter.characteristics() + ) { + def tryAdvance(action: Consumer[_ >: U]): Boolean = + _spliter.tryAdvance((e: Double) => action.accept(mapper(e))) + } + + new ObjectStreamImpl[U]( + spl, + _parallel, + pipeline + .asInstanceOf[ArrayDeque[ObjectStreamImpl[U]]] + ) + } + + def max(): OptionalDouble = { + commenceOperation() + + var max: Double = jl.Double.NEGATIVE_INFINITY + + var exitEarly = false // leave loop after first NaN encountered, if any. 
+ + def body(d: Double): Unit = { + if (d.isNaN()) { + max = d + exitEarly = true + } else if (jl.Double.compare(max, d) < 0) { // sorts -0.0 lower than +0.0 + max = d + } + } + + val advanced = _spliter.tryAdvance((d: Double) => body(d)) + + if (!advanced) OptionalDouble.empty() + else { + while (!exitEarly && + _spliter.tryAdvance((d: Double) => body(d))) { /* search */ } + OptionalDouble.of(max) + } + } + + def min(): OptionalDouble = { + commenceOperation() + + var min: Double = jl.Double.POSITIVE_INFINITY + + var exitEarly = false // leave loop after first NaN encountered, if any. + + def body(d: Double): Unit = { + if (d.isNaN()) { + min = d + exitEarly = true + } else if (jl.Double.compare(min, d) > 0) { // sorts -0.0 lower than +0.0 + min = d + } + } + val advanced = _spliter.tryAdvance((d: Double) => body(d)) + + if (!advanced) OptionalDouble.empty() + else { + while (!exitEarly && + _spliter.tryAdvance((d: Double) => body(d))) { /* search */ } + OptionalDouble.of(min) + } + } + + def noneMatch(pred: DoublePredicate): Boolean = { + // anyMatch() will call commenceOperation() + !this.anyMatch(pred) + } + + def peek(action: DoubleConsumer): DoubleStream = { + commenceOperation() + + val peekAction = action + + val spl = new Spliterators.AbstractDoubleSpliterator( + _spliter.estimateSize(), + _spliter.characteristics() + ) { + + def tryAdvance(action: DoubleConsumer): Boolean = + _spliter.tryAdvance((e: Double) => { + peekAction.accept(e) + action.accept(e) + }) + } + + new DoubleStreamImpl(spl, _parallel, pipeline) + } + + def reduce(accumulator: DoubleBinaryOperator): OptionalDouble = { + commenceOperation() + + var reduceOpt = OptionalDouble.empty() + + _spliter.tryAdvance((e: Double) => reduceOpt = OptionalDouble.of(e)) + reduceOpt.ifPresent((first) => { + var previous = first + _spliter.forEachRemaining((e: Double) => + previous = accumulator.applyAsDouble(previous, e) + ) + reduceOpt = OptionalDouble.of(previous) + }) + + reduceOpt + } + + def 
reduce(identity: Double, accumulator: DoubleBinaryOperator): Double = { + commenceOperation() + + var accumulated = identity + + _spliter.forEachRemaining((e: Double) => + accumulated = accumulator.applyAsDouble(accumulated, e) + ) + accumulated + } + + def skip(n: Long): DoubleStream = { + if (n < 0) + throw new IllegalArgumentException(n.toString()) + + commenceOperation() // JVM tests argument before operatedUpon or closed. + + var nSkipped = 0L + + while ((nSkipped < n) + && (_spliter.tryAdvance((e: Double) => nSkipped += 1L))) { /* skip */ } + + // Follow JVM practice; return new stream, not remainder of "this" stream. + new DoubleStreamImpl(_spliter, _parallel, pipeline) + } + + def sorted(): DoubleStream = { + commenceOperation() + + /* Be aware that this method will/should throw on first use if type + * T is not Comparable[T]. This is described in the Java Stream doc. + * + * Implementation note: + * It would seem that Comparator.naturalOrder() + * could be used here. The SN compiler complains, rightly, that + * T is not known to be [T <: Comparable[T]]. That is because + * T may actually not _be_ comparable. The comparator below punts + * the issue and raises an exception if T is, indeed, not comparable. + */ + + val buffer = new ArrayList[Double]() + _spliter.forEachRemaining((e: Double) => { buffer.add(e); () }) + + // See if there is a more efficient way of doing this. 
+ val nElements = buffer.size() + val primitiveDoubles = new Array[Double](nElements) + for (j <- 0 until nElements) + primitiveDoubles(j) = buffer.get(j) + + Arrays.sort(primitiveDoubles) + Arrays.stream(primitiveDoubles) + } + + def sum(): Double = { + commenceOperation() + + var sum = 0.0 + + _spliter.forEachRemaining((d: Double) => sum += d) + sum + } + + def summaryStatistics(): DoubleSummaryStatistics = { + commenceOperation() + + val stats = new DoubleSummaryStatistics() + + _spliter.forEachRemaining((d: Double) => stats.accept(d)) + + stats + } + + def toArray(): Array[Double] = { + commenceOperation() + + val knownSize = _spliter.getExactSizeIfKnown() + + if (knownSize < 0) { + val buffer = new ArrayList[Double]() + _spliter.forEachRemaining((e: Double) => { buffer.add(e); () }) + + // See if there is a more efficient way of doing this. + val nElements = buffer.size() + val primitiveDoubles = new Array[Double](nElements) + for (j <- 0 until nElements) + primitiveDoubles(j) = buffer.get(j) + + primitiveDoubles + } else { + val primitiveDoubles = new Array[Double](knownSize.toInt) + var j = 0 + + _spliter.forEachRemaining((e: Double) => { + primitiveDoubles(j) = e + j += 1 + }) + primitiveDoubles + } + } + +} + +object DoubleStreamImpl { + + class Builder extends DoubleStream.Builder { + private val buffer = new ArrayList[Double]() + private var built = false + + override def accept(t: Double): Unit = + if (built) ObjectStreamImpl.throwIllegalStateException() + else buffer.add(t) + + override def build(): DoubleStream = { + built = true + // See if there is a more efficient way of doing this. + val nElements = buffer.size() + val primitiveDoubles = new Array[Double](nElements) + for (j <- 0 until nElements) + primitiveDoubles(j) = buffer.get(j) + + val spliter = Arrays.spliterator(primitiveDoubles) + + new DoubleStreamImpl(spliter, parallel = false) + } + } + + /* This does not depend on Double. 
As IntStreamImpl and LongStreamImpl + * are implemented, it should be moved to a common StreamHelpers.scala. + * Let it prove itself before propagating. + */ + private class CloseExceptionBuffer() { + val buffer = new ArrayDeque[Exception] + + def add(e: Exception): Unit = buffer.addLast(e) + + def reportExceptions(): Unit = { + if (!buffer.isEmpty()) { + val firstException = buffer.removeFirst() + + buffer.forEach(e => + if (e != firstException) + firstException.addSuppressed(e) + ) + + throw (firstException) + } + } + } + + private class DoublePrimitiveCompoundSpliteratorFactory( + spliter: Spliterator.OfDouble, + mapper: DoubleFunction[_ <: DoubleStream], + closeOnFirstTouch: Boolean + ) { + + def get(): ju.Spliterator.OfDouble = { + val substreams = + new Spliterators.AbstractSpliterator[DoubleStream]( + Long.MaxValue, + spliter.characteristics() + ) { + def tryAdvance(action: Consumer[_ >: DoubleStream]): Boolean = { + spliter.tryAdvance((e: Double) => action.accept(mapper(e))) + } + } + + new ju.Spliterator.OfDouble { + override def getExactSizeIfKnown(): Long = -1 + def characteristics(): Int = 0 + def estimateSize(): Long = Long.MaxValue + def trySplit(): Spliterator.OfDouble = + null.asInstanceOf[Spliterator.OfDouble] + + private var currentSpliter: ju.Spliterator.OfDouble = + Spliterators.emptyDoubleSpliterator() + + var currentStream = Optional.empty[DoubleStreamImpl]() + + def tryAdvance(action: DoubleConsumer): Boolean = { + var advanced = false + var done = false + + while (!done) { + if (currentSpliter.tryAdvance(action)) { + /* JVM flatMap() closes substreams on first touch. + * Stream.concat() does not. + */ + + if (closeOnFirstTouch) + currentStream.get().close() + + advanced = true + done = true + } else { + done = !substreams + .tryAdvance((e: DoubleStream) => + currentSpliter = { + val eOfDS = e.asInstanceOf[DoubleStreamImpl] + currentStream = Optional.of(eOfDS) + + /* Tricky bit here! + * Use internal _spliter and not public spliterator(). 
+ * This method may have been called in a stream created + * by concat(). Following JVM practice, concat() + * set each of its input streams as "operated upon" + * before returning its stream. + * + * e.spliterator() checks _operatedUpon, which is true + * in a stream from concat(), and throws. + * Using _spliter skips that check and succeeds. + */ + + eOfDS._spliter + } + ) + } + } + advanced + } + } + } + } + + private class DoubleConcatSpliteratorFactory( + spliter: Spliterator[DoubleStream] + ) { + + def get(): ju.Spliterator.OfDouble = { + val substreams = spliter + + new ju.Spliterator.OfDouble { + override def getExactSizeIfKnown(): Long = -1 + def characteristics(): Int = 0 + def estimateSize(): Long = Long.MaxValue + def trySplit(): Spliterator.OfDouble = + null.asInstanceOf[Spliterator.OfDouble] + + private var currentSpliter: ju.Spliterator.OfDouble = + Spliterators.emptyDoubleSpliterator() + + var currentStream = Optional.empty[DoubleStreamImpl]() + + def tryAdvance(action: DoubleConsumer): Boolean = { + var advanced = false + var done = false + + while (!done) { + if (currentSpliter.tryAdvance(action)) { + advanced = true + done = true + } else { + done = !substreams + .tryAdvance((e: DoubleStream) => + currentSpliter = { + val eOfDS = e.asInstanceOf[DoubleStreamImpl] + currentStream = Optional.of(eOfDS) + + /* Tricky bit here! + * Use internal _spliter and not public spliterator(). + * This method may have been called in a stream created + * by concat(). Following JVM practice, concat() + * set each of its input streams as "operated upon" + * before returning its stream. + * + * e.spliterator() checks _operatedUpon, which is true + * in a stream from concat(), and throws. + * Using _spliter skips that check and succeeds. + */ + + eOfDS._spliter + } + ) + } + } + advanced + } + } + } + } + + def concat(a: DoubleStream, b: DoubleStream): DoubleStream = { + /* See ""Design Note" at corresponding place in ObjectStreamImpl. 
+ * This implementation shares the same noted "features". + */ + val aImpl = a.asInstanceOf[DoubleStreamImpl] + val bImpl = b.asInstanceOf[DoubleStreamImpl] + + aImpl.commenceOperation() + bImpl.commenceOperation() + + val arr = new Array[Object](2) + arr(0) = aImpl + arr(1) = bImpl + + val supplier = + new DoubleStreamImpl.DoubleConcatSpliteratorFactory( + Arrays.spliterator[DoubleStream](arr) + ) + + val pipelineA = aImpl.pipeline + val pipelineB = bImpl.pipeline + val pipelines = new ArrayDeque[DoubleStreamImpl](pipelineA) + pipelines.addAll(pipelineB) + + new DoubleStreamImpl(supplier.get(), parallel = false, pipelines) + } + +} diff --git a/javalib/src/main/scala/java/util/stream/ObjectStreamImpl.scala b/javalib/src/main/scala/java/util/stream/ObjectStreamImpl.scala new file mode 100644 index 0000000000..142be3743e --- /dev/null +++ b/javalib/src/main/scala/java/util/stream/ObjectStreamImpl.scala @@ -0,0 +1,935 @@ +package java.util.stream + +import java.{util => ju} +import java.util._ +import java.util.function._ +import java.util.stream.Collector._ + +private[stream] class ObjectStreamImpl[T]( + val pipeline: ArrayDeque[ObjectStreamImpl[T]] +) extends Stream[T] { + var _spliterArg: Spliterator[T] = _ + var _supplier: Supplier[Spliterator[T]] = _ + var _parallel: Boolean = _ // Scaffolding for later improvements. + var _characteristics: Int = 0 + + lazy val _spliter: Spliterator[T] = + if (_spliterArg != null) _spliterArg + else _supplier.get() + + var _operatedUpon: Boolean = false + var _closed: Boolean = false + + // avoid allocating an onCloseQueue just to check if it is empty. 
+ var onCloseQueueActive = false + lazy val onCloseQueue = new ArrayDeque[Runnable]() + + pipeline.addLast(this) + + def this( + spliterator: Spliterator[T], + parallel: Boolean + ) = { + this(new ArrayDeque[ObjectStreamImpl[T]]) + _spliterArg = spliterator + _parallel = parallel + } + + def this( + spliterator: Spliterator[T], + parallel: Boolean, + parent: Stream[_ <: T] + ) = { + this(parent.asInstanceOf[ObjectStreamImpl[T]].pipeline) + _spliterArg = spliterator + _parallel = parallel + } + + def this( + spliterator: Spliterator[T], + parallel: Boolean, + pipeline: ArrayDeque[ObjectStreamImpl[T]] + ) = { + this(pipeline) + _spliterArg = spliterator + _parallel = parallel + } + + def this( + supplier: Supplier[Spliterator[T]], + characteristics: Int, + parallel: Boolean + ) = { + this(new ArrayDeque[ObjectStreamImpl[T]]) + _supplier = supplier + _parallel = parallel + _characteristics = characteristics + } + +// Methods specified in interface BaseStream ---------------------------- + + /* Throw IllegalStateException() if an attempt is made to operate + * on a stream a second time or after it has been closed. + * The JVM throws on most but not all "second" intermediate or terminal + * stream operations. The intent is that Scala Native match that set. 
+ */ + + protected def commenceOperation(): Unit = { + if (_operatedUpon || _closed) + ObjectStreamImpl.throwIllegalStateException() + + _operatedUpon = true + } + + def close(): Unit = { + if (!_closed) { + val exceptionBuffer = new ObjectStreamImpl.CloseExceptionBuffer() + val it = pipeline.iterator() + + while (it.hasNext()) { + try { + it.next().closeStage() + } catch { + case e: Exception => exceptionBuffer.add(e) + } + } + + exceptionBuffer.reportExceptions() + } + } + + private def closeStage(): Unit = { + _closed = true + + val exceptionBuffer = new ObjectStreamImpl.CloseExceptionBuffer() + + if (onCloseQueueActive) { + val it = onCloseQueue.iterator() + while (it.hasNext()) { + try { + it.next().run() + } catch { + case e: Exception => exceptionBuffer.add(e) + } + } + } + + exceptionBuffer.reportExceptions() + } + + def isParallel(): Boolean = false + + def iterator(): ju.Iterator[T] = { + commenceOperation() + Spliterators.iterator[T](_spliter) + } + + def onClose(closeHandler: Runnable): Stream[T] = { + // JVM appears to not set "operated upon" here. + + if (_closed) + ObjectStreamImpl.throwIllegalStateException() + + // detects & throws on closeHandler == null + onCloseQueue.addLast(closeHandler) + + if (!onCloseQueueActive) + onCloseQueueActive = true + + this + } + + def parallel(): Stream[T] = this // parallel is not yet implemented. + + def sequential(): Stream[T] = this + + def spliterator(): Spliterator[_ <: T] = { + commenceOperation() + _spliter + } + + def unordered(): Stream[T] = { + /* JVM has an unenforced requirement that a stream and its spliterator + * (can you say Harlan Ellison?) should have the same characteristics. 
+ */ + + val masked = _spliter.characteristics() & Spliterator.ORDERED + + if (masked == Spliterator.ORDERED) this + else { + commenceOperation() + + // Clear ORDERED + val unordered = _spliter.characteristics() & ~(Spliterator.ORDERED) + + val spl = new Spliterators.AbstractSpliterator[T]( + _spliter.estimateSize(), + unordered + ) { + def tryAdvance(action: Consumer[_ >: T]): Boolean = + _spliter.tryAdvance((e) => action.accept(e)) + } + + new ObjectStreamImpl[T](spl, _parallel, pipeline) + } + } + +// Methods specified in interface Stream -------------------------------- + + def allMatch(pred: Predicate[_ >: T]): Boolean = { + commenceOperation() + + // Be careful with documented "true" return for empty stream. + var mismatchFound = false + + while (!mismatchFound && + _spliter.tryAdvance((e: T) => + if (!pred.test(e)) + mismatchFound = true + )) { /* search */ } + !mismatchFound + } + + def anyMatch(pred: Predicate[_ >: T]): Boolean = { + commenceOperation() + + var matchFound = false + + while (!matchFound && + _spliter.tryAdvance((e: T) => + if (pred.test(e)) + matchFound = true + )) { /* search */ } + matchFound + } + + def collect[R, A](collector: Collector[_ >: T, A, R]): R = { + // Loosely following the example in the JDK 8 stream.Collector doc. + commenceOperation() + + val supplier = collector.supplier() + val accumulator = collector.accumulator() + // combiner unused in this sequential-only implementation + val finisher = collector.finisher() + + val workInProgress = supplier.get() + + _spliter.forEachRemaining((e) => accumulator.accept(workInProgress, e)) + + /* This check is described in the JVM docs. Seems more costly to + * create & check the Characteristics set than to straight out + * execute an identity finisher(). + * Go figure, it made sense to the JVM doc writers. 
+ */ + if (collector.characteristics().contains(Characteristics.IDENTITY_FINISH)) + workInProgress.asInstanceOf[R] + else + finisher.apply(workInProgress) + } + + def collect[R]( + supplier: Supplier[R], + accumulator: BiConsumer[R, _ >: T], + combiner: BiConsumer[R, R] + ): R = { + commenceOperation() + + val result = supplier.get() + + _spliter.forEachRemaining((e) => accumulator.accept(result, e)) + + result + } + + def count(): Long = { + commenceOperation() + + var count = 0L + _spliter.forEachRemaining(e => count += 1) + count + } + + def distinct(): Stream[T] = { + commenceOperation() + + val seenElements = new ju.HashSet[T]() + + // Some items may be dropped, so the estimated size is a high bound. + val estimatedSize = _spliter.estimateSize() + + val spl = + new Spliterators.AbstractSpliterator[T]( + estimatedSize, + _spliter.characteristics() + ) { + def tryAdvance(action: Consumer[_ >: T]): Boolean = { + var success = false + var done = false + while (!done) { + var advanced = + _spliter.tryAdvance((e) => { + val added = seenElements.add(e) + + if (added) { + action.accept(e) + done = true + success = true + } + }) + if (!advanced) + done = true + } + success + } + } + + new ObjectStreamImpl[T](spl, _parallel, pipeline) + } + + def filter(pred: Predicate[_ >: T]): Stream[T] = { + commenceOperation() + + // Some items may be filtered out, so the estimated size is a high bound. + val estimatedSize = _spliter.estimateSize() + + val spl = new Spliterators.AbstractSpliterator[T]( + estimatedSize, + _spliter.characteristics() + ) { + def tryAdvance(action: Consumer[_ >: T]): Boolean = { + var success = false + var done = false + while (!done) { + var advanced = + _spliter.tryAdvance((e) => { + if (pred.test(e)) { + action.accept(e) + done = true + success = true + } + }) + + if (!advanced) + done = true + } + success + } + } + new ObjectStreamImpl[T](spl, _parallel, pipeline) + } + + /* delegating to findFirst() is an implementation ~~hack~~ expediency. 
+ * Probably near-optimal for sequential streams. Parallel streams may + * offer better possibilities. + */ + def findAny(): Optional[T] = { + // commenceOperation() // findFirst will call, so do not do twice. + findFirst() + } + + def findFirst(): Optional[T] = { + commenceOperation() + var optional = Optional.empty[T]() + _spliter.tryAdvance((e) => { optional = Optional.of(e.asInstanceOf[T]) }) + optional + } + + def flatMap[R]( + mapper: Function[_ >: T, _ <: Stream[_ <: R]] + ): Stream[R] = { + commenceOperation() + + val csf = new ObjectStreamImpl.CompoundSpliteratorFactory[T, R]( + _spliter, + mapper, + closeOnFirstTouch = true + ) + + val coercedPriorStages = pipeline + .asInstanceOf[ArrayDeque[ObjectStreamImpl[R]]] + + new ObjectStreamImpl[R](csf.get(), _parallel, coercedPriorStages) + } + + def flatMapToDouble( + mapper: Function[_ >: T, _ <: DoubleStream] + ): DoubleStream = { + commenceOperation() + + val supplier = + new ObjectStreamImpl.DoublePrimitiveCompoundSpliteratorFactory[T]( + _spliter, + mapper, + closeOnFirstTouch = true + ) + + val coercedPriorStages = pipeline + .asInstanceOf[ArrayDeque[DoubleStreamImpl]] + + new DoubleStreamImpl(supplier.get(), _parallel, coercedPriorStages) + } + + def flatMapToInt( + mapper: Function[_ >: T, _ <: IntStream] + ): IntStream = { + commenceOperation() + + throw new UnsupportedOperationException("Not Yet Implemented") + } + + def flatMapToLong( + mapper: Function[_ >: T, _ <: LongStream] + ): LongStream = { + commenceOperation() + + throw new UnsupportedOperationException("Not Yet Implemented") + } + + def forEach(action: Consumer[_ >: T]): Unit = { + _spliter.forEachRemaining(action) + } + + def forEachOrdered(action: Consumer[_ >: T]): Unit = { + commenceOperation() + _spliter.forEachRemaining(action) + } + + def limit(maxSize: Long): Stream[T] = { + if (maxSize < 0) + throw new IllegalArgumentException(maxSize.toString()) + + commenceOperation() // JVM tests argument before operatedUpon or closed. 
+ + var nSeen = 0L + + val spl = new Spliterators.AbstractSpliterator[T]( + maxSize, + _spliter.characteristics() + ) { + def tryAdvance(action: Consumer[_ >: T]): Boolean = + if (nSeen >= maxSize) false + else { + var advanced = + _spliter.tryAdvance((e) => action.accept(e)) + nSeen = + if (advanced) nSeen + 1 + else Long.MaxValue + + advanced + } + } + + new ObjectStreamImpl[T](spl, _parallel, pipeline) + } + + def map[R]( + mapper: Function[_ >: T, _ <: R] + ): Stream[R] = { + commenceOperation() + + val spl = new Spliterators.AbstractSpliterator[R]( + _spliter.estimateSize(), + _spliter.characteristics() + ) { + def tryAdvance(action: Consumer[_ >: R]): Boolean = + _spliter.tryAdvance((e) => action.accept(mapper(e))) + } + + /* Ugly type handling! but part of map()'s job is to mung types. + * Type erasure is what makes this work, once one lies to the compiler + * about the types involved. + */ + new ObjectStreamImpl[T]( + spl.asInstanceOf[Spliterator[T]], + _parallel, + pipeline + ) + .asInstanceOf[Stream[R]] + } + + def mapToDouble(mapper: ToDoubleFunction[_ >: T]): DoubleStream = { + commenceOperation() + + val spl = new Spliterators.AbstractDoubleSpliterator( + _spliter.estimateSize(), + _spliter.characteristics() + ) { + def tryAdvance(action: DoubleConsumer): Boolean = + _spliter.tryAdvance((e: T) => action.accept(mapper.applyAsDouble(e))) + } + + val coercedPriorStages = pipeline + .asInstanceOf[ArrayDeque[DoubleStreamImpl]] + + new DoubleStreamImpl( + spl, + _parallel, + coercedPriorStages + ) + .asInstanceOf[DoubleStream] + + } + + def mapToInt(mapper: ToIntFunction[_ >: T]): IntStream = + throw new UnsupportedOperationException("Not Yet Implemented") + + def mapToLong(mapper: ToLongFunction[_ >: T]): LongStream = + throw new UnsupportedOperationException("Not Yet Implemented") + + def max(comparator: Comparator[_ >: T]): Optional[T] = { + commenceOperation() + + var maxOpt = Optional.empty[T]() + + _spliter.tryAdvance((e) => maxOpt = 
Optional.of(e.asInstanceOf[T])) + + maxOpt.ifPresent((first) => { + var max = first + _spliter.forEachRemaining((e) => + if (comparator.compare(max, e.asInstanceOf[T]) < 0) + max = e.asInstanceOf[T] + ) + maxOpt = Optional.of(max) + }) + + maxOpt + } + + def min(comparator: Comparator[_ >: T]): Optional[T] = { + commenceOperation() + + var minOpt = Optional.empty[T]() + + _spliter.tryAdvance((e) => minOpt = Optional.of(e.asInstanceOf[T])) + + minOpt.ifPresent((first) => { + var min = first + _spliter.forEachRemaining((e) => + if (comparator.compare(min, e.asInstanceOf[T]) > 0) + min = e.asInstanceOf[T] + ) + minOpt = Optional.of(min) + }) + + minOpt + } + + def noneMatch(pred: Predicate[_ >: T]): Boolean = { + // anyMatch() will call commenceOperation() + !this.anyMatch(pred) + } + + def peek(action: Consumer[_ >: T]): Stream[T] = { + commenceOperation() + + val peekAction = action + + val spl = new Spliterators.AbstractSpliterator[T]( + _spliter.estimateSize(), + _spliter.characteristics() + ) { + + def tryAdvance(action: Consumer[_ >: T]): Boolean = + _spliter.tryAdvance((e) => { + peekAction.accept(e) + action.accept(e) + }) + } + + new ObjectStreamImpl[T](spl, _parallel, pipeline) + } + + def reduce(accumulator: BinaryOperator[T]): Optional[T] = { + commenceOperation() + + var reduceOpt = Optional.empty[T]() + + _spliter.tryAdvance((e) => reduceOpt = Optional.of(e.asInstanceOf[T])) + reduceOpt.ifPresent((first) => { + var previous = first + _spliter.forEachRemaining((e) => + previous = accumulator.apply(previous, e) + ) + reduceOpt = Optional.of(previous) + }) + + reduceOpt + } + + def reduce(identity: T, accumulator: BinaryOperator[T]): T = { + commenceOperation() + + var accumulated = identity + + _spliter.forEachRemaining((e) => + accumulated = accumulator.apply(accumulated, e) + ) + accumulated + } + + def reduce[U]( + identity: U, + accumulator: BiFunction[U, _ >: T, U], + combiner: BinaryOperator[U] + ): U = { + commenceOperation() + + var accumulated = 
identity + + _spliter.forEachRemaining((e) => + accumulated = accumulator.apply(accumulated, e) + ) + accumulated + } + + def skip(n: Long): Stream[T] = { + if (n < 0) + throw new IllegalArgumentException(n.toString()) + + commenceOperation() // JVM tests argument before operatedUpon or closed. + + var nSkipped = 0L + + while ((nSkipped < n) + && (_spliter.tryAdvance((e) => nSkipped += 1L))) { /* skip */ } + + // Follow JVM practice; return new stream, not remainder of "this" stream. + new ObjectStreamImpl[T](_spliter, _parallel, pipeline) + } + + def sorted(): Stream[T] = { + // No commenceOperation() here. sorted(comparator) will make that happen. + + /* Be aware that this method will/should throw on first use if type + * T is not Comparable[T]. This is described in the Java Stream doc. + * + * Implementation note: + * It would seem that Comparator.naturalOrder() + * could be used here. The SN compiler complains, rightly, that + * T is not known to be [T <: Comparable[T]]. That is because + * T may actually not _be_ comparable. The comparator below punts + * the issue and raises an exception if T is, indeed, not comparable. 
+ */ + + val comparator = new Comparator[T] { + def compare(o1: T, o2: T): Int = + o1.asInstanceOf[Comparable[Any]].compareTo(o2) + } + + sorted(comparator) + } + + def sorted(comparator: Comparator[_ >: T]): Stream[T] = { + commenceOperation() + + val buffer = new ArrayList[T]() + _spliter.forEachRemaining((e) => buffer.add(e)) + + buffer.sort(comparator) + buffer.stream() + } + + def toArray(): Array[Object] = { + commenceOperation() + + val knownSize = _spliter.getExactSizeIfKnown() + + if (knownSize < 0) { + val buffer = new ArrayList[T]() + _spliter.forEachRemaining((e: T) => buffer.add(e)) + buffer.toArray() + } else { + val dst = new Array[Object](knownSize.toInt) + var j = 0 + _spliter.forEachRemaining((e) => { + dst(j) = e.asInstanceOf[Object] + j += 1 + }) + dst + } + } + + def toArray[A <: Object](generator: IntFunction[Array[A]]): Array[A] = { + commenceOperation() + + val knownSize = _spliter.getExactSizeIfKnown() + if (knownSize < 0) { + toArray().asInstanceOf[Array[A]] + } else { + val dst = generator(knownSize.toInt) + var j = 0 + _spliter.forEachRemaining((e: T) => { + dst(j) = e.asInstanceOf[A] + j += 1 + }) + dst + } + } + +} + +object ObjectStreamImpl { + + class Builder[T] extends Stream.Builder[T] { + private var built = false + private val buffer = new ArrayList[T]() + + override def accept(t: T): Unit = + if (built) ObjectStreamImpl.throwIllegalStateException() + else buffer.add(t) + + override def build(): Stream[T] = { + built = true + val spliter = buffer.spliterator() + new ObjectStreamImpl(spliter, parallel = false) + } + } + + private class CloseExceptionBuffer() { + val buffer = new ArrayDeque[Exception] + + def add(e: Exception): Unit = buffer.addLast(e) + + def reportExceptions(): Unit = { + /* + val it = buffer.iterator() + + if (it.hasNext()) { + val firstException = it.next() + + while (it.hasNext()) { + val e = it.next() + if (e != firstException) + firstException.addSuppressed(e) + } + + throw (firstException) + } + */ + if 
(!buffer.isEmpty()) { + val firstException = buffer.removeFirst() + + buffer.forEach(e => + if (e != firstException) + firstException.addSuppressed(e) + ) + + throw (firstException) + } + } + + } + + private class CompoundSpliteratorFactory[T, R]( + spliter: Spliterator[T], + mapper: Function[_ >: T, _ <: Stream[_ <: R]], + closeOnFirstTouch: Boolean + ) { + /* Design note: + * Yes, it is passing strange that flatMap + * (closeOnFirstTouch == true ) tryAdvance() is advancing + * along closed streams. Unusual! + * + * That seems to be what Java flatMap() traversal is doing: + * run close handler once, on first successful tryAdvance() of + * each component stream. + */ + + def get(): ju.Spliterator[R] = { + val substreams = + new Spliterators.AbstractSpliterator[Stream[T]]( + Long.MaxValue, + spliter.characteristics() + ) { + def tryAdvance(action: Consumer[_ >: Stream[T]]): Boolean = { + spliter.tryAdvance(e => + action.accept(mapper(e).asInstanceOf[Stream[T]]) + ) + } + } + + new ju.Spliterator[R] { + override def getExactSizeIfKnown(): Long = -1 + def characteristics(): Int = 0 + def estimateSize(): Long = Long.MaxValue + def trySplit(): Spliterator[R] = null.asInstanceOf[Spliterator[R]] + + private var currentSpliter: ju.Spliterator[_ <: R] = + Spliterators.emptySpliterator[R]() + + var currentStream = Optional.empty[ObjectStreamImpl[R]]() + + def tryAdvance(action: Consumer[_ >: R]): Boolean = { + var advanced = false + var done = false + + while (!done) { + if (currentSpliter.tryAdvance(action)) { + /* JVM flatMap() closes substreams on first touch. + * Stream.concat() does not. + */ + + if (closeOnFirstTouch) + currentStream.get().close() + + advanced = true + done = true + } else { + done = !substreams + .tryAdvance((e) => + currentSpliter = { + val eOfR = e.asInstanceOf[ObjectStreamImpl[R]] + currentStream = Optional.of(eOfR) + + /* Tricky bit here! + * Use internal _spliter and not public spliterator(). 
+ * This method may have been called in a stream created + * by concat(). Following JVM practice, concat() + * set each of its input streams as "operated upon" + * before returning its stream. + * + * e.spliterator() checks _operatedUpon, which is true + * in a stream from concat(), and throws. + * Using _spliter skips that check and succeeds. + */ + + eOfR._spliter + } + ) + } + } + advanced + } + } + } + } + + private class DoublePrimitiveCompoundSpliteratorFactory[T]( + spliter: Spliterator[T], + mapper: Function[_ >: T, _ <: DoubleStream], + closeOnFirstTouch: Boolean + ) { + + def get(): ju.Spliterator.OfDouble = { + val substreams = + new Spliterators.AbstractSpliterator[DoubleStream]( + Long.MaxValue, + spliter.characteristics() + ) { + def tryAdvance(action: Consumer[_ >: DoubleStream]): Boolean = { + spliter.tryAdvance(e => action.accept(mapper(e))) + } + } + + new ju.Spliterator.OfDouble { + override def getExactSizeIfKnown(): Long = -1 + def characteristics(): Int = 0 + def estimateSize(): Long = Long.MaxValue + def trySplit(): Spliterator.OfDouble = + null.asInstanceOf[Spliterator.OfDouble] + + private var currentSpliter: ju.Spliterator.OfDouble = + Spliterators.emptyDoubleSpliterator() + + var currentStream = Optional.empty[DoubleStreamImpl]() + + def tryAdvance(action: DoubleConsumer): Boolean = { + var advanced = false + var done = false + + while (!done) { + if (currentSpliter.tryAdvance(action)) { + /* JVM flatMap() closes substreams on first touch. + * Stream.concat() does not. + */ + + if (closeOnFirstTouch) + currentStream.get().close() + + advanced = true + done = true + } else { + done = !substreams + .tryAdvance((e: DoubleStream) => + currentSpliter = { + val eOfDS = e.asInstanceOf[DoubleStreamImpl] + currentStream = Optional.of(eOfDS) + + /* Tricky bit here! + * Use internal _spliter and not public spliterator(). + * This method may have been called in a stream created + * by concat(). 
Following JVM practice, concat() + * set each of its input streams as "operated upon" + * before returning its stream. + * + * e.spliterator() checks _operatedUpon, which is true + * in a stream from concat(), and throws. + * Using _spliter skips that check and succeeds. + */ + + eOfDS._spliter + } + ) + } + } + advanced + } + } + } + } + + def concat[T](a: Stream[_ <: T], b: Stream[_ <: T]): Stream[T] = { + /* Design Note: + * This implementation may not comply with the following section + * of the JVM description of the Stream#concat method. + * "This method operates on the two input streams and binds each + * stream to its source. As a result subsequent modifications to an + * input stream source may not be reflected in the concatenated + * stream result." + * + * If I understand correctly, this implementation is late binding + * and the specification is for early binding. This is a rare event. + * Usually the defect is the other way around: early when late needed. + */ + + /* Design Note: + * At first impression, concat could be succinctly implemented as: + * Stream.of(a, b).flatMap[T](Function.identity()) + * + * This implementation exists because JVM flatMap(), hence SN flatMap(), + * closes each stream as it touches it. JVM concat() closes zero + * streams until a final explicit close() happens. A subtle difference, + * until the bug reports start pouring in. 
+ */ + + val aImpl = a.asInstanceOf[ObjectStreamImpl[T]] + val bImpl = b.asInstanceOf[ObjectStreamImpl[T]] + + aImpl.commenceOperation() + bImpl.commenceOperation() + + val arr = new Array[Object](2) + arr(0) = aImpl + arr(1) = bImpl + + val csf = new CompoundSpliteratorFactory[Stream[T], T]( + Arrays.spliterator[Stream[T]](arr), + Function.identity(), + closeOnFirstTouch = false + ) + + val pipelineA = aImpl.pipeline + val pipelineB = bImpl.pipeline + val pipelines = new ArrayDeque[ObjectStreamImpl[T]](pipelineA) + pipelines.addAll(pipelineB) + + new ObjectStreamImpl[T](csf.get(), parallel = false, pipelines) + } + + def throwIllegalStateException(): Unit = { + throw new IllegalStateException( + "stream has already been operated upon or closed" + ) + } + +} diff --git a/javalib/src/main/scala/java/util/stream/Stream.scala b/javalib/src/main/scala/java/util/stream/Stream.scala index bdd6568b9c..6263667e11 100644 --- a/javalib/src/main/scala/java/util/stream/Stream.scala +++ b/javalib/src/main/scala/java/util/stream/Stream.scala @@ -1,12 +1,313 @@ package java.util.stream -import java.util.function.{Consumer, Function, Predicate} -import scala.scalanative.compat.StreamsCompat._ +import java.util._ +import java.util.function._ + +trait Stream[T] extends BaseStream[T, Stream[T]] { + + def allMatch(pred: Predicate[_ >: T]): Boolean + + def anyMatch(pred: Predicate[_ >: T]): Boolean + + def collect[R, A](collector: Collector[_ >: T, A, R]): R + + def collect[R]( + supplier: Supplier[R], + accumulator: BiConsumer[R, _ >: T], + combiner: BiConsumer[R, R] + ): R + + def count(): Long + + def distinct(): Stream[T] + + // Since: Java 9 + def dropWhile(pred: Predicate[_ >: T]): Stream[T] = { + Objects.requireNonNull(pred) + + val spliter = this.spliterator() // also marks this stream "operated upon" + + // JVM appears to use an unsized iterator for dropWhile() + // May need to adjust other characteristics. 
+ val unSized = spliter.characteristics() & + ~(Spliterator.SIZED | Spliterator.SUBSIZED) + + val spl = new Spliterators.AbstractSpliterator[T]( + Long.MaxValue, + unSized + ) { + + override def trySplit(): Spliterator[T] = + null.asInstanceOf[Spliterator[T]] + + var doneDropping = false + + def tryAdvance(action: Consumer[_ >: T]): Boolean = { + if (doneDropping) { + spliter.tryAdvance((e) => action.accept(e)) + } else { + var doneLooping = false + while (!doneLooping) { + val advanced = + spliter.tryAdvance((e) => { + if (!pred.test(e)) { + action.accept(e) + doneDropping = true + doneLooping = true + } + + }) + if (!advanced) + doneLooping = true + } + doneDropping // true iff some element was accepted + } + } + } + + new ObjectStreamImpl[T](spl, parallel = false, parent = this) + } -trait Stream[+T] extends BaseStream[T, Stream[T]] { - def flatMap[R](mapper: Function[_ >: T, _ <: Stream[_ <: R]]): Stream[R] def filter(pred: Predicate[_ >: T]): Stream[T] + + def findAny(): Optional[T] + + def findFirst(): Optional[T] + + def flatMap[R](mapper: Function[_ >: T, _ <: Stream[_ <: R]]): Stream[R] + + def flatMapToDouble( + mapper: Function[_ >: T, _ <: DoubleStream] + ): DoubleStream + + def flatMapToInt( + mapper: Function[_ >: T, _ <: IntStream] + ): IntStream + + def flatMapToLong( + mapper: Function[_ >: T, _ <: LongStream] + ): LongStream + def forEach(action: Consumer[_ >: T]): Unit + + def forEachOrdered(action: Consumer[_ >: T]): Unit + + def limit(maxSize: Long): Stream[T] + + def map[R](mapper: Function[_ >: T, _ <: R]): Stream[R] + + // Since: Java 16 + def mapMulti[R](mapper: BiConsumer[_ >: T, Consumer[_ >: R]]): Stream[R] = { + /* Design Note: + * This implementation differs from the reference default implementation + * described in the Java Stream#mapMulti documentation. 
+ * + * That implementation is basically: + * this.flatMap(e => { + * val buffer = new ArrayList[R]() + * mapper.accept(e, r => buffer.add(r)) + * buffer.stream() + * }) + * + * It offers few of the benefits described for the mapMulti method: + * reduced number of streams created, runtime efficiency, etc. + * + * This implementation should actually provide the benefits of mapMulti(). + */ + + val spliter = this.spliterator() // also marks this stream "operated upon" + + val buffer = new ArrayDeque[R]() + + // Can not predict replacements, so Spliterator can not be SIZED. + // May need to adjust other characteristics. + val unSized = spliter.characteristics() & + ~(Spliterator.SIZED | Spliterator.SUBSIZED) + + val spl = new Spliterators.AbstractSpliterator[R](Long.MaxValue, unSized) { + + def tryAdvance(action: Consumer[_ >: R]): Boolean = { + var advanced = false + + var done = false + while (!done) { + if (buffer.size() == 0) { + val stepped = + spliter.tryAdvance(e => mapper.accept(e, r => buffer.add(r))) + done = !stepped + } else { + action.accept(buffer.removeFirst()) + advanced = true + done = true + } + } + + advanced + } + } + + (new ObjectStreamImpl[R]( + spl, + parallel = false, + parent = this.asInstanceOf[Stream[R]] + )) + .asInstanceOf[Stream[R]] + } + + // Since: Java 16 + def mapMultiToDouble( + mapper: BiConsumer[_ >: T, _ >: DoubleConsumer] + ): DoubleStream = { + // See implementation notes in mapMulti[R]() + + val spliter = this.spliterator() // also marks this stream "operated upon" + + val buffer = new ArrayDeque[Double]() + + // Can not predict replacements, so Spliterator can not be SIZED. + // May need to adjust other characteristics. 
+ val unSized = spliter.characteristics() & + ~(Spliterator.SIZED | Spliterator.SUBSIZED) + + val spl = + new Spliterators.AbstractDoubleSpliterator(Long.MaxValue, unSized) { + val dc: DoubleConsumer = doubleValue => buffer.add(doubleValue) + + def tryAdvance(action: DoubleConsumer): Boolean = { + var advanced = false + + var done = false + while (!done) { + if (buffer.size() == 0) { + val stepped = spliter.tryAdvance(e => mapper.accept(e, dc)) + done = !stepped + } else { + action.accept(buffer.removeFirst()) + advanced = true + done = true + } + } + + advanced + } + } + + val coercedPriorStages = this + .asInstanceOf[ObjectStreamImpl[T]] + .pipeline + .asInstanceOf[ArrayDeque[DoubleStreamImpl]] + + (new DoubleStreamImpl( + spl, + parallel = false, + coercedPriorStages + )) + .asInstanceOf[DoubleStream] + } + + // Since: Java 16 + def mapMultiToInt( + mapper: BiConsumer[_ >: T, _ >: IntConsumer] + ): IntStream = { + throw new UnsupportedOperationException("Not Yet Implemented") + } + + // Since: Java 16 + def mapMultiToLong( + mapper: BiConsumer[_ >: T, _ >: LongConsumer] + ): LongStream = { + throw new UnsupportedOperationException("Not Yet Implemented") + } + + def mapToDouble(mapper: ToDoubleFunction[_ >: T]): DoubleStream + + def mapToInt(mapper: ToIntFunction[_ >: T]): IntStream + + def mapToLong(mapper: ToLongFunction[_ >: T]): LongStream + + def max(comparator: Comparator[_ >: T]): Optional[T] + + def min(comparator: Comparator[_ >: T]): Optional[T] + + def noneMatch(pred: Predicate[_ >: T]): Boolean + + def peek(action: Consumer[_ >: T]): Stream[T] + + def reduce(accumulator: BinaryOperator[T]): Optional[T] + + def reduce(identity: T, accumulator: BinaryOperator[T]): T + + def reduce[U]( + identity: U, + accumulator: BiFunction[U, _ >: T, U], + combiner: BinaryOperator[U] + ): U + + def skip(n: Long): Stream[T] + + def spliterator(): Spliterator[_ <: T] + + def sorted(): Stream[T] + + def sorted(comparator: Comparator[_ >: T]): Stream[T] + + // Since: Java 
9 + def takeWhile(pred: Predicate[_ >: T]): Stream[T] = { + Objects.requireNonNull(pred) + + val spliter = this.spliterator() // also marks this stream "operated upon" + + // JVM appears to use an unsized iterator for takeWhile() + // May need to adjust other characteristics. + val unSized = spliter.characteristics() & + ~(Spliterator.SIZED | Spliterator.SUBSIZED) + + val spl = new Spliterators.AbstractSpliterator[T]( + Long.MaxValue, + unSized + ) { + var done = false // short-circuit + + override def trySplit(): Spliterator[T] = + null.asInstanceOf[Spliterator[T]] + + def tryAdvance(action: Consumer[_ >: T]): Boolean = { + if (done) false + else + spliter.tryAdvance((e) => + if (!pred.test(e)) done = true + else action.accept(e) + ) + } + } + + new ObjectStreamImpl[T](spl, parallel = false, parent = this) + } + + def toArray(): Array[Object] + + def toArray[A <: Object](generator: IntFunction[Array[A]]): Array[A] + + // Since: Java 16 + def toList[T](): List[T] = { + // A loose translation of the Java 19 toList example implementation. + // That doc suggests that implementations override this inelegant + // implementation. + + val spliter = this.spliterator() // also marks this stream "operated upon" + + // Use size knowledge, if available, to reduce list re-sizing overhead. 
+ val knownSize = spliter.getExactSizeIfKnown() + val initialSize = + if (knownSize < 0) 50 // a guess, intended to be better than default 16 + else knownSize.toInt + + val aL = new ArrayList[T](initialSize) + + spliter.forEachRemaining((e) => aL.add(e.asInstanceOf[T])) + + Collections.unmodifiableList(aL) + } } object Stream { @@ -19,8 +320,100 @@ object Stream { def build(): Stream[T] } - def builder[T](): Builder[T] = new WrappedScalaStream.Builder[T] - def empty[T](): Stream[T] = new WrappedScalaStream(SStream.empty[T], None) - def of[T](values: Array[AnyRef]): Stream[T] = - new WrappedScalaStream(values.asInstanceOf[Array[T]].toScalaStream, None) + def builder[T](): Builder[T] = new ObjectStreamImpl.Builder[T] + + def concat[T](a: Stream[_ <: T], b: Stream[_ <: T]): Stream[T] = + ObjectStreamImpl.concat(a, b) + + def empty[T](): Stream[T] = + new ObjectStreamImpl(Spliterators.emptySpliterator[T](), parallel = false) + + def generate[T](s: Supplier[T]): Stream[T] = { + val spliter = + new Spliterators.AbstractSpliterator[T](Long.MaxValue, 0) { + def tryAdvance(action: Consumer[_ >: T]): Boolean = { + action.accept(s.get()) + true + } + } + + new ObjectStreamImpl(spliter, parallel = false) + } + + // Since: Java 9 + def iterate[T]( + seed: T, + hasNext: Predicate[T], + next: UnaryOperator[T] + ): Stream[T] = { + // "seed" on RHS here is to keep compiler happy with local var init + var previous = seed + var seedUsed = false + + val spliter = + new Spliterators.AbstractSpliterator[T](Long.MaxValue, 0) { + def tryAdvance(action: Consumer[_ >: T]): Boolean = { + val current = + if (seedUsed) next(previous) + else { + seedUsed = true + seed + } + + val advanceOK = hasNext.test(current) + if (advanceOK) { + action.accept(current) + previous = current + } + advanceOK + } + } + + new ObjectStreamImpl(spliter, parallel = false) + } + + def iterate[T](seed: T, f: UnaryOperator[T]): Stream[T] = { + var previous = seed // "seed" here is just to keep compiler happy. 
+ var seedUsed = false + + val spliter = + new Spliterators.AbstractSpliterator[T](Long.MaxValue, 0) { + def tryAdvance(action: Consumer[_ >: T]): Boolean = { + val current = + if (seedUsed) f(previous) + else { + seedUsed = true + seed + } + + action.accept(current) + previous = current + true + } + } + + new ObjectStreamImpl(spliter, parallel = false) + } + + def of[T](values: Array[Object]): Stream[T] = { + /* One would expect variable arguments to be declared as + * "values: Object*" here. + * However, that causes "symbol not found" errors at OS link time. + * An implicit conversion must be missing in the javalib environment. + */ + + val bldr = Stream.builder[T]() + for (j <- values) + bldr.add(j.asInstanceOf[T]) + bldr.build() + } + + def of[T](t: Object): Stream[T] = + Stream.builder[T]().add(t.asInstanceOf[T]).build() + + // Since: Java 9 + def ofNullable[T <: Object](t: T): Stream[T] = { + if (t == null) Stream.empty[T]() + else Stream.of[T](t) + } } diff --git a/javalib/src/main/scala/java/util/stream/StreamSupport.scala b/javalib/src/main/scala/java/util/stream/StreamSupport.scala new file mode 100644 index 0000000000..a25d28067b --- /dev/null +++ b/javalib/src/main/scala/java/util/stream/StreamSupport.scala @@ -0,0 +1,74 @@ +package java.util.stream + +import java.util.function.Supplier +import java.util.Spliterator + +object StreamSupport { + + /* Design Note: + * stream() and doubleStream() are implemented. intStream() and + * longStream() are not. The first two need to mature before + * doubleStream() gets propagated into the latter two. No sense + * multiplying bugs beyond necessity, said William. 
+ */ + + def doubleStream( + spliterator: Spliterator.OfDouble, + parallel: Boolean + ): DoubleStream = { + new DoubleStreamImpl(spliterator, parallel) + } + + def doubleStream( + supplier: Supplier[Spliterator.OfDouble], + characteristics: Int, + parallel: Boolean + ): DoubleStream = { + new DoubleStreamImpl(supplier, characteristics, parallel) + } + + def intStream( + spliterator: Spliterator.OfInt, + parallel: Boolean + ): IntStream = { + throw new UnsupportedOperationException("Not Yet Implemented") + } + + def intStream( + supplier: Supplier[Spliterator.OfInt], + characteristics: Int, + parallel: Boolean + ): IntStream = { + throw new UnsupportedOperationException("Not Yet Implemented") + } + + def longStream( + spliterator: Spliterator.OfLong, + parallel: Boolean + ): LongStream = { + throw new UnsupportedOperationException("Not Yet Implemented") + } + + def longStream( + supplier: Supplier[Spliterator.OfLong], + characteristics: Int, + parallel: Boolean + ): LongStream = { + throw new UnsupportedOperationException("Not Yet Implemented") + } + + def stream[T]( + spliterator: Spliterator[T], + parallel: Boolean + ): Stream[T] = { + new ObjectStreamImpl[T](spliterator, parallel) + } + + def stream[T]( + supplier: Supplier[Spliterator[T]], + characteristics: Int, + parallel: Boolean + ): Stream[T] = { + new ObjectStreamImpl[T](supplier, characteristics, parallel) + } +} diff --git a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala b/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala deleted file mode 100644 index 41059c1fd0..0000000000 --- a/javalib/src/main/scala/java/util/stream/WrappedScalaStream.scala +++ /dev/null @@ -1,112 +0,0 @@ -package java.util.stream - -import java.util.Iterator -import scalanative.compat.StreamsCompat._ -import java.util.function.{Consumer, Function, Predicate} - -class WrappedScalaStream[T]( - private val underlying: SStream[T], - closeHandler: Option[Runnable] -) extends Stream[T] { - override def 
close(): Unit = closeHandler.foreach(_.run()) - override def isParallel(): Boolean = false - override def iterator(): Iterator[T] = - WrappedScalaStream.scala2javaIterator(underlying.iterator) - override def parallel(): Stream[T] = this - override def sequential(): Stream[T] = this - override def unordered(): Stream[T] = this - override def onClose(closeHandler: Runnable): Stream[T] = - new WrappedScalaStream(underlying, Some(closeHandler)) - - override def flatMap[R]( - mapper: Function[_ >: T, _ <: Stream[_ <: R]] - ): Stream[R] = { - val streams = underlying.map(v => mapper(v).asInstanceOf[Stream[R]]) - new CompositeStream(streams, closeHandler) - } - - override def filter(pred: Predicate[_ >: T]): Stream[T] = - new WrappedScalaStream(underlying.filter(pred.test), closeHandler) - - override def forEach(action: Consumer[_ >: T]): Unit = - underlying.foreach(action.accept(_)) -} - -object WrappedScalaStream { - class Builder[T] extends Stream.Builder[T] { - val buffer = new scala.collection.mutable.ListBuffer[T]() - override def accept(t: T): Unit = buffer += t - override def build(): Stream[T] = - new WrappedScalaStream(buffer.toScalaStream, None) - } - - def scala2javaIterator[T]( - it: scala.collection.Iterator[T] - ): java.util.Iterator[T] = - new java.util.Iterator[T] { - override def hasNext(): Boolean = it.hasNext - override def next(): T = it.next() - override def remove(): Unit = throw new UnsupportedOperationException() - } -} - -private final class CompositeStream[T]( - substreams: Seq[Stream[T]], - closeHandler: Option[Runnable] -) extends Stream[T] { - override def close(): Unit = { - substreams.foreach(_.close()) - closeHandler.foreach(_.run()) - } - override def isParallel(): Boolean = false - override def iterator(): Iterator[T] = - new Iterator[T] { - private val its = substreams.iterator - private var currentIt: Iterator[_ <: T] = EmptyIterator - - override def hasNext(): Boolean = - if (currentIt.hasNext()) true - else if (its.hasNext) { - 
currentIt = its.next().iterator() - hasNext() - } else { - false - } - - override def next(): T = - if (hasNext()) currentIt.next() - else throw new NoSuchElementException() - - override def remove(): Unit = - throw new UnsupportedOperationException() - - } - - override def parallel(): Stream[T] = this - override def sequential(): Stream[T] = this - override def unordered(): Stream[T] = this - override def onClose(closeHandler: Runnable): Stream[T] = - new CompositeStream(substreams, Some(closeHandler)) - - override def flatMap[R]( - mapper: Function[_ >: T, _ <: Stream[_ <: R]] - ): Stream[R] = { - val newStreams: Seq[Stream[R]] = - substreams.map((js: Stream[T]) => js.flatMap[R](mapper)) - new CompositeStream(newStreams, closeHandler) - } - - override def filter(pred: Predicate[_ >: T]): Stream[T] = { - val newStreams: Seq[Stream[T]] = substreams.map(s => s.filter(pred)) - new CompositeStream(newStreams, closeHandler) - } - - override def forEach(action: Consumer[_ >: T]): Unit = - substreams.foreach(_.forEach(action)) -} - -private object EmptyIterator extends Iterator[Nothing] { - override def hasNext(): Boolean = false - override def next(): Nothing = throw new NoSuchElementException() - override def remove(): Unit = throw new UnsupportedOperationException() -} diff --git a/javalib/src/main/scala/scala/scalanative/compat/StreamsCompat.scala b/javalib/src/main/scala/scala/scalanative/compat/StreamsCompat.scala deleted file mode 100644 index 8f86548e3b..0000000000 --- a/javalib/src/main/scala/scala/scalanative/compat/StreamsCompat.scala +++ /dev/null @@ -1,17 +0,0 @@ -package scala.scalanative.compat -import scala.language.implicitConversions - -object StreamsCompat { - type SStream[T] = scalanative.compat.ScalaStream.Underlying[T] - val SStreamImpl = scalanative.compat.ScalaStream - val SStream = SStreamImpl.Underlying - - implicit class ArrayToScalaStream[T](val arr: Array[T]) extends AnyVal { - def toScalaStream: SStream[T] = 
SStreamImpl.seqToScalaStream[T](arr) - } - - implicit class IterableToScalaStream[T](val seq: Iterable[T]) extends AnyVal { - def toScalaStream: SStream[T] = SStreamImpl.seqToScalaStream[T](seq) - } - -} diff --git a/javalib/src/main/scala/scala/scalanative/nio/fs/FileHelpers.scala b/javalib/src/main/scala/scala/scalanative/nio/fs/FileHelpers.scala index d7c8e2deb4..012ff31042 100644 --- a/javalib/src/main/scala/scala/scalanative/nio/fs/FileHelpers.scala +++ b/javalib/src/main/scala/scala/scalanative/nio/fs/FileHelpers.scala @@ -49,8 +49,6 @@ object FileHelpers { } } - private[this] lazy val random = new scala.util.Random() - final case class Dirent(name: String, tpe: CShort) def list[T: ClassTag]( path: String, f: (String, FileType) => T, @@ -216,6 +214,8 @@ object FileHelpers { dir } + private[this] lazy val random = new scala.util.Random() + private def genTempFile( prefix: String, suffix: String, diff --git a/javalib/src/main/scala/scala/scalanative/nio/fs/windows/WindowsPath.scala b/javalib/src/main/scala/scala/scalanative/nio/fs/windows/WindowsPath.scala index 984971ec7d..b3cf0b72d2 100644 --- a/javalib/src/main/scala/scala/scalanative/nio/fs/windows/WindowsPath.scala +++ b/javalib/src/main/scala/scala/scalanative/nio/fs/windows/WindowsPath.scala @@ -42,10 +42,10 @@ class WindowsPath private[windows] ( case Array(host, share) => share + "\\" case _ => "" } - case (PathType.Absolute, Some(root)) => root - case (PathType.DirectoryRelative, Some(root)) => root + "\\" - case (PathType.DriveRelative, _) => "\\" - case _ => "" + case (PathType.Absolute, Some(root)) => root + case (PathType.DirectoryRelative, _) => "\\" + case (PathType.DriveRelative, Some(root)) => root + case _ => "" } drivePrefix + segments.mkString(seperator) } @@ -69,7 +69,9 @@ class WindowsPath private[windows] ( override def getParent(): Path = { val nameCount = getNameCount() - if (nameCount == 0 || (nameCount == 1 && !isAbsolute())) + if (nameCount == 0) + null + else if (nameCount == 1 && 
pathType != PathType.Absolute && pathType != PathType.DirectoryRelative) null else if (root.isDefined) new WindowsPath(pathType, root, segments.init) @@ -164,20 +166,26 @@ class WindowsPath private[windows] ( resolveSibling(WindowsPathParser(other)) override def relativize(other: Path): Path = { - if (isAbsolute() ^ other.isAbsolute()) { + val otherType = other match { + case null => throw new NullPointerException() + case p: WindowsPath => p.pathType + case _ => + throw new IllegalArgumentException("'other' is different Path class") + } + if (pathType != otherType) { throw new IllegalArgumentException("'other' is different type of Path") } else { - val normThis = new WindowsPath(WindowsPath.normalized(this)) + val normThis = WindowsPathParser(WindowsPath.normalized(this)) if (normThis.toString.isEmpty()) { other } else if (other.startsWith(normThis)) { other.subpath(getNameCount(), other.getNameCount()) } else if (normThis.getParent() == null) { - new WindowsPath("../" + other.toString()) + WindowsPathParser("../" + other.toString()) } else { val next = normThis.getParent().relativize(other).toString() if (next.isEmpty()) new WindowsPath("..") - else new WindowsPath("../" + next) + else WindowsPathParser("../" + next) } } } diff --git a/javalib/src/main/scala/scala/scalanative/nio/fs/windows/WindowsPathParser.scala b/javalib/src/main/scala/scala/scalanative/nio/fs/windows/WindowsPathParser.scala index fc3959f430..2da85ded52 100644 --- a/javalib/src/main/scala/scala/scalanative/nio/fs/windows/WindowsPathParser.scala +++ b/javalib/src/main/scala/scala/scalanative/nio/fs/windows/WindowsPathParser.scala @@ -27,20 +27,23 @@ object WindowsPathParser { rawPath.size > n && pred(rawPath.charAt(n)) } - val (tpe, root) = if (charAtIdx(0, isSlash)) { - if (charAtIdx(1, isSlash)) - UNC -> Some(getUNCRoot(rawPath)) - else if (charAtIdx(1, isASCIILetter) && charAtIdx(2, _ == ':')) - // URI specific, absolute path starts with / followed by absolute path - Absolute -> 
Some(rawPath.substring(1, 4)) - else - DriveRelative -> None - } else if (charAtIdx(0, isASCIILetter) && charAtIdx(1, _ == ':')) { - if (charAtIdx(2, isSlash)) - Absolute -> Some(rawPath.substring(0, 3)) - else - DirectoryRelative -> Some(rawPath.substring(0, 2)) - } else Relative -> None + val (tpe, root) = + if (rawPath.isEmpty) + Relative -> None + else if (charAtIdx(0, isSlash)) { + if (charAtIdx(1, isSlash)) + UNC -> Some(getUNCRoot(rawPath)) + else if (charAtIdx(1, isASCIILetter) && charAtIdx(2, _ == ':')) + // URI specific, absolute path starts with / followed by absolute path + Absolute -> Some(rawPath.substring(1, 4)) + else + DirectoryRelative -> Some(rawPath.substring(0, 1)) + } else if (charAtIdx(0, isASCIILetter) && charAtIdx(1, _ == ':')) { + if (charAtIdx(2, isSlash)) + Absolute -> Some(rawPath.substring(0, 3)) + else + DriveRelative -> Some(rawPath.substring(0, 2)) + } else Relative -> None val relativePath = root .map(r => rawPath.substring(r.length)) diff --git a/junit-runtime/src/main/scala/org/junit/internal/InexactComparisonCriteria.scala b/junit-runtime/src/main/scala/org/junit/internal/InexactComparisonCriteria.scala index ed06d08376..8de8be1403 100644 --- a/junit-runtime/src/main/scala/org/junit/internal/InexactComparisonCriteria.scala +++ b/junit-runtime/src/main/scala/org/junit/internal/InexactComparisonCriteria.scala @@ -18,10 +18,19 @@ class InexactComparisonCriteria private (val fDelta: AnyRef) expected: AnyRef, actual: AnyRef ): Unit = { - Assert.assertEquals( - expected.asInstanceOf[Double], - actual.asInstanceOf[Double], - fDelta.asInstanceOf[Double] - ) + fDelta match { + case delta: java.lang.Double => + Assert.assertEquals( + expected.asInstanceOf[Double], + actual.asInstanceOf[Double], + delta + ) + case delta: java.lang.Float => + Assert.assertEquals( + expected.asInstanceOf[Float], + actual.asInstanceOf[Float], + delta + ) + } } } diff --git a/nativelib/src/main/resources/scala-native/gc/boehm/gc.c 
b/nativelib/src/main/resources/scala-native/gc/boehm/gc.c index c264f2fd0e..6f9fb8d0d7 100644 --- a/nativelib/src/main/resources/scala-native/gc/boehm/gc.c +++ b/nativelib/src/main/resources/scala-native/gc/boehm/gc.c @@ -2,6 +2,7 @@ #include #include #include +#include "../shared/Parsing.h" #if defined(_WIN32) || defined(WIN32) // Boehm on Windows needs User32.lib linked @@ -38,6 +39,19 @@ void *scalanative_alloc_atomic(void *info, size_t size) { return (void *)alloc; } +size_t scalanative_get_init_heapsize() { + return Parse_Env_Or_Default("GC_INITIAL_HEAP_SIZE", 0L); +} + +size_t scalanative_get_max_heapsize() { + struct GC_prof_stats_s *stats = + (struct GC_prof_stats_s *)malloc(sizeof(struct GC_prof_stats_s)); + GC_get_prof_stats(stats, sizeof(struct GC_prof_stats_s)); + size_t heap_sz = stats->heapsize_full; + free(stats); + return Parse_Env_Or_Default("GC_MAXIMUM_HEAP_SIZE", heap_sz); +} + void scalanative_collect() { GC_gcollect(); } void scalanative_register_weak_reference_handler(void *handler) {} diff --git a/nativelib/src/main/resources/scala-native/gc/commix/ComixGC.c b/nativelib/src/main/resources/scala-native/gc/commix/CommixGC.c similarity index 75% rename from nativelib/src/main/resources/scala-native/gc/commix/ComixGC.c rename to nativelib/src/main/resources/scala-native/gc/commix/CommixGC.c index 1ee1a0e60d..dbd48b461a 100644 --- a/nativelib/src/main/resources/scala-native/gc/commix/ComixGC.c +++ b/nativelib/src/main/resources/scala-native/gc/commix/CommixGC.c @@ -16,6 +16,8 @@ #include "WeakRefGreyList.h" #include "Sweeper.h" +#include "Parsing.h" + void scalanative_collect(); void scalanative_afterexit() { @@ -83,3 +85,19 @@ INLINE void scalanative_collect() { INLINE void scalanative_register_weak_reference_handler(void *handler) { WeakRefGreyList_SetHandler(handler); } + +/* Get the minimum heap size */ +/* If the user has set a minimum heap size using the GC_INITIAL_HEAP_SIZE + * environment variable, */ +/* then this size will be returned. 
*/ +/* Otherwise, the default minimum heap size will be returned.*/ +size_t scalanative_get_init_heapsize() { return Settings_MinHeapSize(); } + +/* Get the maximum heap size */ +/* If the user has set a maximum heap size using the GC_MAXIMUM_HEAP_SIZE */ +/* environment variable,*/ +/* then this size will be returned.*/ +/* Otherwise, the total size of the physical memory (guarded) will be returned*/ +size_t scalanative_get_max_heapsize() { + return Parse_Env_Or_Default("GC_MAXIMUM_HEAP_SIZE", Heap_getMemoryLimit()); +} \ No newline at end of file diff --git a/nativelib/src/main/resources/scala-native/gc/commix/Heap.h b/nativelib/src/main/resources/scala-native/gc/commix/Heap.h index 9cfda96d3e..d44d967ffc 100644 --- a/nativelib/src/main/resources/scala-native/gc/commix/Heap.h +++ b/nativelib/src/main/resources/scala-native/gc/commix/Heap.h @@ -88,5 +88,6 @@ void Heap_Init(Heap *heap, size_t minHeapSize, size_t maxHeapSize); void Heap_Collect(Heap *heap); void Heap_GrowIfNeeded(Heap *heap); void Heap_Grow(Heap *heap, uint32_t increment); +size_t Heap_getMemoryLimit(); #endif // IMMIX_HEAP_H diff --git a/nativelib/src/main/resources/scala-native/gc/immix/Heap.h b/nativelib/src/main/resources/scala-native/gc/immix/Heap.h index 801a03fabb..9732b090a2 100644 --- a/nativelib/src/main/resources/scala-native/gc/immix/Heap.h +++ b/nativelib/src/main/resources/scala-native/gc/immix/Heap.h @@ -50,5 +50,6 @@ void Heap_Collect(Heap *heap, Stack *stack); void Heap_Recycle(Heap *heap); void Heap_Grow(Heap *heap, uint32_t increment); +size_t Heap_getMemoryLimit(); #endif // IMMIX_HEAP_H diff --git a/nativelib/src/main/resources/scala-native/gc/immix/ImmixGC.c b/nativelib/src/main/resources/scala-native/gc/immix/ImmixGC.c index 04dc85aae4..4277bda93d 100644 --- a/nativelib/src/main/resources/scala-native/gc/immix/ImmixGC.c +++ b/nativelib/src/main/resources/scala-native/gc/immix/ImmixGC.c @@ -12,6 +12,10 @@ #include "Constants.h" #include "Settings.h" #include "WeakRefStack.h" 
+#include "Parsing.h" + +// Stack boottom of the main thread +extern word_t **__stack_bottom; void scalanative_collect(); @@ -57,3 +61,19 @@ INLINE void scalanative_collect() { Heap_Collect(&heap, &stack); } INLINE void scalanative_register_weak_reference_handler(void *handler) { WeakRefStack_SetHandler(handler); } + +/* Get the minimum heap size */ +/* If the user has set a minimum heap size using the GC_INITIAL_HEAP_SIZE + * environment variable, */ +/* then this size will be returned. */ +/* Otherwise, the default minimum heap size will be returned.*/ +size_t scalanative_get_init_heapsize() { return Settings_MinHeapSize(); } + +/* Get the maximum heap size */ +/* If the user has set a maximum heap size using the GC_MAXIMUM_HEAP_SIZE + * environment variable,*/ +/* then this size will be returned.*/ +/* Otherwise, the total size of the physical memory (guarded) will be returned*/ +size_t scalanative_get_max_heapsize() { + return Parse_Env_Or_Default("GC_MAXIMUM_HEAP_SIZE", Heap_getMemoryLimit()); +} diff --git a/nativelib/src/main/resources/scala-native/gc/none/gc.c b/nativelib/src/main/resources/scala-native/gc/none/gc.c index 97a8be81d2..e4a9af1fdc 100644 --- a/nativelib/src/main/resources/scala-native/gc/none/gc.c +++ b/nativelib/src/main/resources/scala-native/gc/none/gc.c @@ -34,6 +34,14 @@ void exitWithOutOfMemory() { exit(1); } +size_t scalanative_get_init_heapsize() { + return Parse_Env_Or_Default("GC_INITIAL_HEAP_SIZE", 0L); +} + +size_t scalanative_get_max_heapsize() { + return Parse_Env_Or_Default("GC_MAXIMUM_HEAP_SIZE", getMemorySize()); +} + void Prealloc_Or_Default() { if (TO_NORMAL_MMAP == 1L) { // Check if we have prealloc env varible diff --git a/nativelib/src/main/resources/scala-native/gc/shared/ScalaNativeGC.h b/nativelib/src/main/resources/scala-native/gc/shared/ScalaNativeGC.h new file mode 100644 index 0000000000..af81079e2d --- /dev/null +++ b/nativelib/src/main/resources/scala-native/gc/shared/ScalaNativeGC.h @@ -0,0 +1,77 @@ +#ifndef 
SCALA_NATIVE_GC_H +#define SCALA_NATIVE_GC_H +#include +#include +#include "GCTypes.h" + +#ifdef _WIN32 +#define WIN32_LEAN_AND_MEAN +// Boehm on Windows needs User32.lib linked +#pragma comment(lib, "User32.lib") +#pragma comment(lib, "Kernel32.lib") +#include +typedef DWORD ThreadRoutineReturnType; +#else +#include +typedef void *ThreadRoutineReturnType; +#endif + +typedef ThreadRoutineReturnType (*ThreadStartRoutine)(void *); +typedef void *RoutineArgs; + +void scalanative_init(); +void *scalanative_alloc(void *info, size_t size); +void *scalanative_alloc_small(void *info, size_t size); +void *scalanative_alloc_large(void *info, size_t size); +void *scalanative_alloc_atomic(void *info, size_t size); +void scalanative_collect(); +void scalanative_register_weak_reference_handler(void *handler); +size_t scalanative_get_init_heapsize(); +size_t scalanative_get_max_heapsize(); + +// Functions used to create a new thread supporting multithreading support in +// the garbage collector. Would execute a proxy startup routine to register +// newly created thread upon startup and unregister it from the GC upon +// termination. +#ifdef _WIN32 +HANDLE scalanative_CreateThread(LPSECURITY_ATTRIBUTES threadAttributes, + SIZE_T stackSize, ThreadStartRoutine routine, + RoutineArgs args, DWORD creationFlags, + DWORD *threadId); +#else +int scalanative_pthread_create(pthread_t *thread, pthread_attr_t *attr, + ThreadStartRoutine routine, RoutineArgs args); +#endif + +// Current type of execution by given threadin foreign scope be included in the +// stop-the-world mechanism, as they're assumed to not modify the state of the +// GC. Upon conversion from Managed to Unmanged state calling thread shall dump +// the contents of the register to the stack and save the top address of the +// stack. +typedef enum scalanative_MutatorThreadState { + /* Thread executes Scala Native code using GC following cooperative mode - + * it periodically polls for synchronization events. 
+ */ + MutatorThreadState_Managed = 0, + /* Thread executes foreign code (syscalls, C functions) and is not able to + * modify the state of the GC. Upon synchronization event garbage collector + * would ignore this thread. Upon returning from foreign execution thread + * would stop until synchronization event would finish. + */ + MutatorThreadState_Unmanaged = 1 +} MutatorThreadState; + +// Receiver for notifications on entering/exiting potentially blocking extern +// functions. Changes the internal state of current (calling) thread +void scalanative_gc_set_mutator_thread_state(MutatorThreadState); + +// Conditionally protected memory address used for STW events polling +typedef void **safepoint_t; +extern safepoint_t scalanative_gc_safepoint; + +// Check for StopTheWorld event and wait for its end if needed +// Used internally only in GC. Scala Native safepoints polling would be inlined +// in the code. +void scalanative_gc_safepoint_poll(); + +#endif // SCALA_NATIVE_GC_H diff --git a/nativelib/src/main/scala/scala/scalanative/meta/LinktimeInfo.scala b/nativelib/src/main/scala/scala/scalanative/meta/LinktimeInfo.scala index a929e4b4f8..c258090e6e 100644 --- a/nativelib/src/main/scala/scala/scalanative/meta/LinktimeInfo.scala +++ b/nativelib/src/main/scala/scala/scalanative/meta/LinktimeInfo.scala @@ -12,20 +12,31 @@ object LinktimeInfo { @resolvedAtLinktime("scala.scalanative.meta.linktimeinfo.releaseMode") def releaseMode: Boolean = resolved - @resolvedAtLinktime("scala.scalanative.meta.linktimeinfo.isWindows") - def isWindows: Boolean = resolved + @resolvedAtLinktime + def isWindows: Boolean = target.os == "windows" - @resolvedAtLinktime("scala.scalanative.meta.linktimeinfo.isLinux") - def isLinux: Boolean = resolved + @resolvedAtLinktime + def isLinux: Boolean = target.os == "linux" - @resolvedAtLinktime("scala.scalanative.meta.linktimeinfo.isMac") - def isMac: Boolean = resolved + @resolvedAtLinktime + def isMac: Boolean = target.vendor == "apple" && target.os 
== "darwin" - @resolvedAtLinktime("scala.scalanative.meta.linktimeinfo.isFreeBSD") - def isFreeBSD: Boolean = resolved + @resolvedAtLinktime + def isFreeBSD: Boolean = target.os == "freebsd" @resolvedAtLinktime( "scala.scalanative.meta.linktimeinfo.isWeakReferenceSupported" ) def isWeakReferenceSupported: Boolean = resolved + + object target { + @resolvedAtLinktime("scala.scalanative.meta.linktimeinfo.target.arch") + def arch: String = resolved + @resolvedAtLinktime("scala.scalanative.meta.linktimeinfo.target.vendor") + def vendor: String = resolved + @resolvedAtLinktime("scala.scalanative.meta.linktimeinfo.target.os") + def os: String = resolved + @resolvedAtLinktime("scala.scalanative.meta.linktimeinfo.target.env") + def env: String = resolved + } } diff --git a/nativelib/src/main/scala/scala/scalanative/runtime/GC.scala b/nativelib/src/main/scala/scala/scalanative/runtime/GC.scala index dec4707132..0e6e575d76 100644 --- a/nativelib/src/main/scala/scala/scalanative/runtime/GC.scala +++ b/nativelib/src/main/scala/scala/scalanative/runtime/GC.scala @@ -35,4 +35,8 @@ object GC { def init(): Unit = extern @name("scalanative_register_weak_reference_handler") def registerWeakReferenceHandler(handler: Ptr[Byte]): Unit = extern + @name("scalanative_get_init_heapsize") + def getInitHeapSize(): CSize = extern + @name("scalanative_get_max_heapsize") + def getMaxHeapSize(): CSize = extern } diff --git a/nativelib/src/main/scala/scala/scalanative/runtime/MemoryPool.scala b/nativelib/src/main/scala/scala/scalanative/runtime/MemoryPool.scala index 70b9870855..d89e4e4baf 100644 --- a/nativelib/src/main/scala/scala/scalanative/runtime/MemoryPool.scala +++ b/nativelib/src/main/scala/scala/scalanative/runtime/MemoryPool.scala @@ -78,7 +78,7 @@ object MemoryPool { final class MemoryPoolZone(private[this] val pool: MemoryPool) extends Zone { private[this] var tailPage = pool.claim() private[this] var headPage = tailPage - private[this] var largeAllocations: scala.Array[RawPtr] = null 
+ private[this] var largeAllocations: scala.Array[Ptr[_]] = null private[this] var largeOffset = 0 private def checkOpen(): Unit = @@ -109,7 +109,7 @@ final class MemoryPoolZone(private[this] val pool: MemoryPool) extends Zone { if (largeAllocations != null) { var i = 0 while (i < largeOffset) { - libc.free(largeAllocations(i)) + libc.free(toRawPtr(largeAllocations(i))) i += 1 } largeAllocations = null @@ -157,11 +157,11 @@ final class MemoryPoolZone(private[this] val pool: MemoryPool) extends Zone { private def allocLarge(size: CSize): Ptr[Byte] = { if (largeAllocations == null) { - largeAllocations = new scala.Array[RawPtr](16) + largeAllocations = new scala.Array[Ptr[_]](16) } if (largeOffset == largeAllocations.size) { val newLargeAllocations = - new scala.Array[RawPtr](largeAllocations.size * 2) + new scala.Array[Ptr[_]](largeAllocations.size * 2) Array.copy( largeAllocations, 0, @@ -171,11 +171,11 @@ final class MemoryPoolZone(private[this] val pool: MemoryPool) extends Zone { ) largeAllocations = newLargeAllocations } - val result = libc.malloc(size) + val result = fromRawPtr[Byte](libc.malloc(size)) largeAllocations(largeOffset) = result largeOffset += 1 - fromRawPtr(result) + result } } diff --git a/nativelib/src/main/scala/scala/scalanative/unsafe/resolvedAtLinktime.scala b/nativelib/src/main/scala/scala/scalanative/unsafe/resolvedAtLinktime.scala index be3baad658..3890e9470e 100644 --- a/nativelib/src/main/scala/scala/scalanative/unsafe/resolvedAtLinktime.scala +++ b/nativelib/src/main/scala/scala/scalanative/unsafe/resolvedAtLinktime.scala @@ -4,9 +4,16 @@ import scala.annotation.StaticAnnotation import scala.annotation.meta.{field, getter} import scala.scalanative.runtime.intrinsic -/** Used to annotate that given value should be resolved at link-time, based on - * provided `withName` parameter +/** Used to annotate methods which should be evaluated in linktime, allowing to + * remove unused paths and symbols, e.g. 
whe cross compiling for different OS + * Annotated methods needs to operate only on literal values, other methods + * with this annotation. */ @field @getter -private[scalanative] class resolvedAtLinktime(withName: String) - extends StaticAnnotation +class resolvedAtLinktime() extends StaticAnnotation { + + /** Used to annotate that given value should be resolved at link-time, based + * on provided `withName` parameter provided by the build tool. + */ + def this(withName: String) = this() +} diff --git a/nir/src/main/scala/scala/scalanative/nir/Attrs.scala b/nir/src/main/scala/scala/scalanative/nir/Attrs.scala index 89652ebee3..3d5dc19bb6 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Attrs.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Attrs.scala @@ -30,6 +30,7 @@ object Attr { case object Extern extends Attr final case class Link(name: String) extends Attr case object Abstract extends Attr + case object LinktimeResolved extends Attr } final case class Attrs( @@ -40,7 +41,8 @@ final case class Attrs( isDyn: Boolean = false, isStub: Boolean = false, isAbstract: Boolean = false, - links: Seq[Attr.Link] = Seq.empty + links: Seq[Attr.Link] = Seq.empty, + isLinktimeResolved: Boolean = false ) { def toSeq: Seq[Attr] = { val out = Seq.newBuilder[Attr] @@ -52,10 +54,74 @@ final case class Attrs( if (isDyn) out += Dyn if (isStub) out += Stub if (isAbstract) out += Abstract + if (isLinktimeResolved) out += LinktimeResolved out ++= links out.result() } + + // backward compatibilty + def this( + inlineHint: Inline, + specialize: Specialize, + opt: Opt, + isExtern: Boolean, + isDyn: Boolean, + isStub: Boolean, + isAbstract: Boolean, + links: Seq[Attr.Link] + ) = this( + inlineHint = inlineHint, + specialize = specialize, + opt = opt, + isExtern = isExtern, + isDyn = isDyn, + isStub = isStub, + isAbstract = isAbstract, + isLinktimeResolved = false, + links = links + ) + def copy( + inlineHint: Inline = this.inlineHint, + specialize: Specialize = this.specialize, + 
opt: Opt = this.opt, + isExtern: Boolean = this.isExtern, + isDyn: Boolean = this.isDyn, + isStub: Boolean = this.isStub, + isAbstract: Boolean = this.isAbstract, + links: Seq[Attr.Link] = this.links, + isLinktimeResolved: Boolean = this.isLinktimeResolved + ): Attrs = new Attrs( + inlineHint = inlineHint, + specialize = specialize, + opt = opt, + isExtern = isExtern, + isDyn = isDyn, + isStub = isStub, + isAbstract = isAbstract, + links = links, + isLinktimeResolved = isLinktimeResolved + ) + def copy( + inlineHint: Inline, + specialize: Specialize, + opt: Opt, + isExtern: Boolean, + isDyn: Boolean, + isStub: Boolean, + isAbstract: Boolean, + links: Seq[Attr.Link] + ): Attrs = new Attrs( + inlineHint = inlineHint, + specialize = specialize, + opt = opt, + isExtern = isExtern, + isDyn = isDyn, + isStub = isStub, + isAbstract = isAbstract, + links = links, + isLinktimeResolved = this.isLinktimeResolved + ) } object Attrs { val None = new Attrs() @@ -68,6 +134,7 @@ object Attrs { var isDyn = false var isStub = false var isAbstract = false + var isLinktimeResolved = false val links = Seq.newBuilder[Attr.Link] attrs.foreach { @@ -79,6 +146,7 @@ object Attrs { case Stub => isStub = true case link: Attr.Link => links += link case Abstract => isAbstract = true + case LinktimeResolved => isLinktimeResolved = true } new Attrs( @@ -89,7 +157,30 @@ object Attrs { isDyn = isDyn, isStub = isStub, isAbstract = isAbstract, + isLinktimeResolved = isLinktimeResolved, links = links.result() ) } + + // backward compatibilty + def apply( + inlineHint: Inline, + specialize: Specialize, + opt: Opt, + isExtern: Boolean, + isDyn: Boolean, + isStub: Boolean, + isAbstract: Boolean, + links: Seq[Attr.Link] + ): Attrs = new Attrs( + inlineHint = inlineHint, + specialize = specialize, + opt = opt, + isExtern = isExtern, + isDyn = isDyn, + isStub = isStub, + isAbstract = isAbstract, + isLinktimeResolved = false, + links = links + ) } diff --git 
a/nir/src/main/scala/scala/scalanative/nir/Show.scala b/nir/src/main/scala/scala/scalanative/nir/Show.scala index afabf43184..070ef940f3 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Show.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Show.scala @@ -101,6 +101,8 @@ object Show { str("\")") case Attr.Abstract => str("abstract") + case Attr.LinktimeResolved => + str("linktime") } def next_(next: Next): Unit = next match { diff --git a/nir/src/main/scala/scala/scalanative/nir/Versions.scala b/nir/src/main/scala/scala/scalanative/nir/Versions.scala index 556855d222..c281fb4e29 100644 --- a/nir/src/main/scala/scala/scalanative/nir/Versions.scala +++ b/nir/src/main/scala/scala/scalanative/nir/Versions.scala @@ -25,7 +25,7 @@ object Versions { final val revision: Int = 9 // a.k.a. MINOR version /* Current public release version of Scala Native. */ - final val current: String = "0.4.12" + final val current: String = "0.4.14" final val currentBinaryVersion: String = binaryVersion(current) private object FullVersion { diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala index 4941c222b9..fb705b5004 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinaryDeserializer.scala @@ -79,6 +79,8 @@ final class BinaryDeserializer(buffer: ByteBuffer, bufferName: String) { case T.ExternAttr => Attr.Extern case T.LinkAttr => Attr.Link(getUTF8String()) case T.AbstractAttr => Attr.Abstract + + case T.LinktimeResolvedAttr => Attr.LinktimeResolved } private def getBin(): Bin = getInt match { diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala index e6bb816095..dcca3cfa8f 100644 --- 
a/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/BinarySerializer.scala @@ -109,6 +109,8 @@ final class BinarySerializer { case Attr.Extern => putInt(T.ExternAttr) case Attr.Link(s) => putInt(T.LinkAttr); putUTF8String(s) case Attr.Abstract => putInt(T.AbstractAttr) + + case Attr.LinktimeResolved => putInt(T.LinktimeResolvedAttr) } private def putBin(bin: Bin) = bin match { diff --git a/nir/src/main/scala/scala/scalanative/nir/serialization/Tags.scala b/nir/src/main/scala/scala/scalanative/nir/serialization/Tags.scala index f1f8262f9c..a6b71d1ac7 100644 --- a/nir/src/main/scala/scala/scalanative/nir/serialization/Tags.scala +++ b/nir/src/main/scala/scala/scalanative/nir/serialization/Tags.scala @@ -27,6 +27,7 @@ object Tags { final val DynAttr = 1 + LinkAttr final val StubAttr = 1 + DynAttr final val AbstractAttr = 1 + StubAttr + final val LinktimeResolvedAttr = 1 + AbstractAttr // Binary ops diff --git a/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenExpr.scala index 552bd4153a..7bc8af39ad 100644 --- a/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenExpr.scala @@ -189,16 +189,27 @@ trait NirGenExpr[G <: nsc.Global with Singleton] { self: NirGenPhase[G] => genIf(retty, cond, thenp, elsep)(tree.pos) } - def genIf(retty: nir.Type, condp: Tree, thenp: Tree, elsep: Tree)(implicit - ifPos: nir.Position - ): Val = { + def genIf( + retty: nir.Type, + condp: Tree, + thenp: Tree, + elsep: Tree, + ensureLinktime: Boolean = false + )(implicit ifPos: nir.Position): Val = { val thenn, elsen, mergen = fresh() val mergev = Val.Local(fresh(), retty) getLinktimeCondition(condp).fold { + if (ensureLinktime) { + globalError( + condp.pos, + "Cannot resolve given condition in linktime, it might be depending on runtime 
value" + ) + } val cond = genExpr(condp) buf.branch(cond, Next(thenn), Next(elsen))(condp.pos) } { cond => + curMethodUsesLinktimeResolvedValues = true buf.branchLinktime(cond, Next(thenn), Next(elsen))(condp.pos) } @@ -1216,7 +1227,7 @@ trait NirGenExpr[G <: nsc.Global with Singleton] { self: NirGenPhase[G] => condp match { // if(bool) (...) - case Apply(LinktimeProperty(name, position), Nil) => + case Apply(LinktimeProperty(name, _, position), Nil) => Some { SimpleCondition( propertyName = name, @@ -1228,7 +1239,7 @@ trait NirGenExpr[G <: nsc.Global with Singleton] { self: NirGenPhase[G] => // if(!bool) (...) case Apply( Select( - Apply(LinktimeProperty(name, position), Nil), + Apply(LinktimeProperty(name, _, position), Nil), nme.UNARY_! ), Nil @@ -1243,7 +1254,7 @@ trait NirGenExpr[G <: nsc.Global with Singleton] { self: NirGenPhase[G] => // if(property x) (...) case Apply( - Select(LinktimeProperty(name, position), comp), + Select(LinktimeProperty(name, _, position), comp), List(arg @ Literal(Constant(_))) ) => Some { @@ -2484,6 +2495,8 @@ trait NirGenExpr[G <: nsc.Global with Singleton] { self: NirGenPhase[G] => } res += promotedArg } + // Scala 2.13 only + case Select(_, name) if name == definitions.NilModule.name => () case _ => reporter.error( argp.pos, diff --git a/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenPhase.scala b/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenPhase.scala index a47066680f..ce5f0ae4f4 100644 --- a/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenPhase.scala +++ b/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenPhase.scala @@ -41,6 +41,7 @@ abstract class NirGenPhase[G <: Global with Singleton](override val global: G) protected val curStatBuffer = new util.ScopedVar[StatBuffer] protected val cachedMethodSig = collection.mutable.Map.empty[(Symbol, Boolean), nir.Type.Function] + protected var curMethodUsesLinktimeResolvedValues = false protected def unwind(implicit fresh: Fresh): 
Next = curUnwindHandler.get.fold[Next](Next.None) { handler => diff --git a/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenStat.scala b/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenStat.scala index daa8c551ce..330c231fab 100644 --- a/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenStat.scala +++ b/nscplugin/src/main/scala-2/scala/scalanative/nscplugin/NirGenStat.scala @@ -637,8 +637,13 @@ trait NirGenStat[G <: nsc.Global with Singleton] { self: NirGenPhase[G] => scoped( curMethodSig := sig ) { + curMethodUsesLinktimeResolvedValues = false val body = genMethodBody(dd, rhs) - Some(Defn.Define(attrs, name, sig, body)) + val methodAttrs = + if (curMethodUsesLinktimeResolvedValues) + attrs.copy(isLinktimeResolved = true) + else attrs + Some(Defn.Define(methodAttrs, name, sig, body)) } } } @@ -653,46 +658,89 @@ trait NirGenStat[G <: nsc.Global with Singleton] { self: NirGenPhase[G] => "Link-time property cannot be constant value, it would be inlined by scalac compiler" ) } + val retty = genType(dd.tpt.tpe) + + import LinktimeProperty.Type._ + dd match { + case LinktimeProperty(propertyName, Provided, _) => + if (dd.rhs.symbol == ResolvedMethod) Some { + checkExplicitReturnTypeAnnotation(dd, "value resolved at link-time") + genLinktimeResolvedMethod(dd, retty, name) { + _.call( + Linktime.PropertyResolveFunctionTy(retty), + Linktime.PropertyResolveFunction(retty), + Val.String(propertyName) :: Nil, + Next.None + ) + } + } + else { + globalError( + dd.pos, + s"Link-time resolved property must have ${ResolvedMethod.fullName} as body" + ) + None + } - dd.rhs.symbol match { - case ResolvedMethod => - checkExplicitReturnTypeAnnotation(dd, "value resolved at link-time") - dd match { - case LinktimeProperty(propertyName, _) => - val retty = genType(dd.tpt.tpe) - val defn = genLinktimeResolvedMethod(retty, propertyName, name) - Some(defn) - case _ => None + case LinktimeProperty(_, Calculated, _) => + Some { + 
genLinktimeResolvedMethod(dd, retty, name) { buf => + def resolve(tree: Tree): nir.Val = tree match { + case Literal(Constant(_)) => + buf.genExpr(tree) + case If(cond, thenp, elsep) => + buf.genIf(retty, cond, thenp, elsep, ensureLinktime = true) + case tree: Apply if retty == nir.Type.Bool => + val True = ValTree(nir.Val.True) + val False = ValTree(nir.Val.False) + buf.genIf(retty, tree, True, False, ensureLinktime = true) + case Block(stats, expr) => + stats.foreach { v => + globalError( + v.pos, + "Linktime resolved block can only contain other linktime resolved def defintions" + ) + // unused, generated to prevent compiler plugin crash when referencing ident + buf.genExpr(v) + } + resolve(expr) + } + resolve(dd.rhs) + } } + case _ => globalError( dd.pos, - s"Link-time resolved property must have ${ResolvedMethod.fullName} as body" + "Cannot transform to linktime resolved expression" ) None } } - /* Generate stub method that can be used to get value of link-time property at runtime */ private def genLinktimeResolvedMethod( + dd: DefDef, retty: nir.Type, - propertyName: String, methodName: nir.Global - )(implicit pos: nir.Position): nir.Defn = { + )(genValue: ExprBuffer => nir.Val)(implicit pos: nir.Position): nir.Defn = { implicit val fresh: Fresh = Fresh() val buf = new ExprBuffer() - buf.label(fresh()) - val value = buf.call( - Linktime.PropertyResolveFunctionTy(retty), - Linktime.PropertyResolveFunction(retty), - Val.String(propertyName) :: Nil, - Next.None - ) - buf.ret(value) + scoped( + curFresh := fresh, + curMethodSym := dd.symbol, + curMethodThis := None, + curMethodEnv := new MethodEnv(fresh), + curMethodInfo := new CollectMethodInfo, + curUnwindHandler := None + ) { + buf.label(fresh()) + val value = genValue(buf) + buf.ret(value) + } Defn.Define( - Attrs(inlineHint = Attr.AlwaysInline), + Attrs(inlineHint = Attr.AlwaysInline, isLinktimeResolved = true), methodName, Type.Function(Seq.empty, retty), buf.toSeq @@ -1139,22 +1187,29 @@ trait NirGenStat[G 
<: nsc.Global with Singleton] { self: NirGenPhase[G] => } protected object LinktimeProperty { - def unapply(tree: Tree): Option[(String, nir.Position)] = { + sealed trait Type + object Type { + case object Provided extends Type + case object Calculated extends Type + } + def unapply(tree: Tree): Option[(String, Type, nir.Position)] = { if (tree.symbol == null) None - else { + else tree.symbol .getAnnotation(ResolvedAtLinktimeClass) - .flatMap(_.args.headOption) - .flatMap { - case Literal(Constant(name: String)) => Some((name, tree.pos)) - case _ => + .flatMap(_.args match { + case Literal(Constant(name: String)) :: Nil => + Some(name, Type.Provided, tree.pos) + case _ :: Nil => globalError( tree.symbol.pos, s"Name used to resolve link-time property needs to be non-null literal constant" ) None - } - } + case Nil => + val syntheticName = genName(tree.symbol).mangle + Some(syntheticName, Type.Calculated, tree.pos) + }) } } } diff --git a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirCodeGen.scala b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirCodeGen.scala index 8b356b1e00..9200dbb7ea 100644 --- a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirCodeGen.scala +++ b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirCodeGen.scala @@ -42,6 +42,7 @@ class NirCodeGen(val settings: GenNIR.Settings)(using ctx: Context) protected val curMethodLabels = new util.ScopedVar[MethodLabelsEnv] protected val curMethodThis = new util.ScopedVar[Option[nir.Val]] protected val curMethodIsExtern = new util.ScopedVar[Boolean] + protected var curMethodUsesLinktimeResolvedValues = false protected val curFresh = new util.ScopedVar[nir.Fresh] protected val curUnwindHandler = new util.ScopedVar[Option[nir.Local]] @@ -219,23 +220,4 @@ class NirCodeGen(val settings: GenNIR.Settings)(using ctx: Context) } } - protected object LinktimeProperty { - def unapply(tree: Tree): Option[(String, nir.Position)] = { - if (tree.symbol == null) None - else { - 
tree.symbol - .getAnnotation(defnNir.ResolvedAtLinktimeClass) - .flatMap(_.argumentConstantString(0)) - .map(_ -> positionsConversions.fromSpan(tree.span)) - .orElse { - report.error( - "Name used to resolve link-time property needs to be non-null literal constant", - tree.sourcePos - ) - None - } - } - } - } - end NirCodeGen diff --git a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirGenExpr.scala b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirGenExpr.scala index f012da59d9..0df35da6f2 100644 --- a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirGenExpr.scala +++ b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirGenExpr.scala @@ -431,8 +431,14 @@ trait NirGenExpr(using Context) { genIf(retty, cond, thenp, elsep) } - private def genIf(retty: nir.Type, condp: Tree, thenp: Tree, elsep: Tree)( - using nir.Position + def genIf( + retty: nir.Type, + condp: Tree, + thenp: Tree, + elsep: Tree, + ensureLinktime: Boolean = false + )(using + nir.Position ): Val = { val thenn, elsen, mergen = fresh() val mergev = Val.Local(fresh(), retty) @@ -441,8 +447,14 @@ trait NirGenExpr(using Context) { given nir.Position = condp.span getLinktimeCondition(condp) match { case Some(cond) => + curMethodUsesLinktimeResolvedValues = true buf.branchLinktime(cond, Next(thenn), Next(elsen)) case None => + if ensureLinktime then + report.error( + "Cannot resolve given condition in linktime, it might be depending on runtime value", + condp.srcPos + ) val cond = genExpr(condp) buf.branch(cond, Next(thenn), Next(elsen))(using condp.span) } @@ -2008,7 +2020,7 @@ trait NirGenExpr(using Context) { condp match { // if(bool) (...) - case Apply(LinktimeProperty(name, position), Nil) => + case Apply(LinktimeProperty(name, _, position), Nil) => Some { SimpleCondition( propertyName = name, @@ -2020,7 +2032,7 @@ trait NirGenExpr(using Context) { // if(!bool) (...) 
case Apply( Select( - Apply(LinktimeProperty(name, position), Nil), + Apply(LinktimeProperty(name, _, position), Nil), nme.UNARY_! ), Nil @@ -2035,7 +2047,7 @@ trait NirGenExpr(using Context) { // if(property x) (...) case Apply( - Select(LinktimeProperty(name, position), comp), + Select(LinktimeProperty(name, _, position), comp), List(arg @ Literal(Constant(_))) ) => Some { @@ -2052,7 +2064,7 @@ trait NirGenExpr(using Context) { case Apply( Select( Apply( - Select(LinktimeProperty(name, position), nme.EQ), + Select(LinktimeProperty(name, _, position), nme.EQ), List(arg @ Literal(Constant(_))) ), nme.UNARY_! @@ -2535,24 +2547,6 @@ trait NirGenExpr(using Context) { ) } - private object LinktimeProperty { - def unapply(tree: Tree): Option[(String, nir.Position)] = { - if (tree.symbol == null) None - else { - tree.symbol - .getAnnotation(defnNir.ResolvedAtLinktimeClass) - .flatMap(_.argumentConstantString(0).orElse { - report.error( - "Name used to resolve link-time property needs to be non-null literal constant", - tree.sourcePos - ) - None - }) - .zip(Some(fromSpan(tree.span))) - } - } - } - private def labelExcludeUnitValue(label: Local, value: nir.Val.Local)(using nir.Position ): nir.Val = @@ -2604,4 +2598,5 @@ trait NirGenExpr(using Context) { override def ++=(other: nir.Buffer): Unit = this ++= other.toSeq } + } diff --git a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirGenStat.scala b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirGenStat.scala index 716b7f8ce5..72483efbdd 100644 --- a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirGenStat.scala +++ b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/NirGenStat.scala @@ -221,12 +221,13 @@ trait NirGenStat(using Context) { scoped( curMethodSig := sig ) { - val defn = Defn.Define( - attrs, - name, - sig, - genMethodBody(dd, rhs) - ) + curMethodUsesLinktimeResolvedValues = false + val body = genMethodBody(dd, rhs) + val methodAttrs = + if (curMethodUsesLinktimeResolvedValues) 
+ attrs.copy(isLinktimeResolved = true) + else attrs + val defn = Defn.Define(methodAttrs, name, sig, body) Some(defn) } } @@ -366,45 +367,98 @@ trait NirGenStat(using Context) { dd.sourcePos ) } + val retty = genType(dd.tpt.tpe) + + import LinktimeProperty.Type._ + dd match { + case LinktimeProperty(propertyName, Provided, _) => + if (dd.rhs.symbol == defnNir.UnsafePackage_resolved) Some { + checkExplicitReturnTypeAnnotation(dd, "value resolved at link-time") + genLinktimeResolvedMethod(dd, retty, name) { + _.call( + Linktime.PropertyResolveFunctionTy(retty), + Linktime.PropertyResolveFunction(retty), + Val.String(propertyName) :: Nil, + Next.None + ) + } + } + else { + report.error( + s"Link-time resolved property must have ${defnNir.UnsafePackage_resolved.fullName} as body", + dd.sourcePos + ) + None + } - if (dd.rhs.symbol == defnNir.UnsafePackage_resolved) { - checkExplicitReturnTypeAnnotation(dd, "value resolved at link-time") - dd match { - case LinktimeProperty(propertyName, _) => - val retty = genType(dd.tpt.tpe) - val defn = genLinktimeResolvedMethod(retty, propertyName, name) - Some(defn) - case _ => None - } - } else { - report.error( - s"Link-time resolved property must have ${defnNir.UnsafePackage_resolved.fullName} as body", - dd.sourcePos - ) - None + case LinktimeProperty(_, Calculated, _) => + Some { + genLinktimeResolvedMethod(dd, retty, name) { buf => + def resolve(tree: Tree): nir.Val = tree match { + case Literal(Constant(_)) => + buf.genExpr(tree) + case If(cond, thenp, elsep) => + buf.genIf(retty, cond, thenp, elsep, ensureLinktime = true) + case tree: Apply if retty == nir.Type.Bool => + val True = ValTree(nir.Val.True) + val False = ValTree(nir.Val.False) + buf.genIf(retty, tree, True, False, ensureLinktime = true) + case Block(stats, expr) => + stats.foreach { v => + report.error( + "Linktime resolved block can only contain other linktime resolved def defintions", + v.srcPos + ) + // unused, generated to prevent compiler plugin crash when 
referencing ident + buf.genExpr(v) + } + expr match { + case Typed(Ident(_), _) | Ident(_) => + report.error( + "Non-inlined terms are not allowed in linktime resolved methods", + expr.srcPos + ) + Val.Zero(retty) + case Typed(tree, _) => resolve(tree) + case tree => resolve(tree) + } + } + resolve(dd.rhs) + } + } + + case _ => + report.error( + "Cannot transform to linktime resolved expression", + dd.srcPos + ) + None } } - /* Generate stub method that can be used to get value of link-time property at runtime */ private def genLinktimeResolvedMethod( + dd: DefDef, retty: nir.Type, - propertyName: String, methodName: nir.Global - )(using nir.Position): Defn = { - given fresh: Fresh = Fresh() + )(genValue: ExprBuffer => nir.Val)(using nir.Position): nir.Defn = { + implicit val fresh: Fresh = Fresh() val buf = new ExprBuffer() - buf.label(fresh()) - val value = buf.call( - Linktime.PropertyResolveFunctionTy(retty), - Linktime.PropertyResolveFunction(retty), - Val.String(propertyName) :: Nil, - Next.None - ) - buf.ret(value) + scoped( + curFresh := fresh, + curMethodSym := dd.symbol, + curMethodThis := None, + curMethodEnv := new MethodEnv(fresh), + curMethodInfo := new CollectMethodInfo, + curUnwindHandler := None + ) { + buf.label(fresh()) + val value = genValue(buf) + buf.ret(value) + } Defn.Define( - Attrs(inlineHint = Attr.AlwaysInline), + Attrs(inlineHint = Attr.AlwaysInline, isLinktimeResolved = true), methodName, Type.Function(Seq.empty, retty), buf.toSeq @@ -688,4 +742,34 @@ trait NirGenStat(using Context) { ) } } + + protected object LinktimeProperty { + enum Type: + case Provided, Calculated + + def unapply(tree: Tree): Option[(String, Type, nir.Position)] = { + if (tree.symbol == null) None + else { + tree.symbol + .getAnnotation(defnNir.ResolvedAtLinktimeClass) + .flatMap { annot => + val pos = positionsConversions.fromSpan(tree.span) + if annot.arguments.isEmpty then + val syntheticName = genName(tree.symbol).mangle + Some(syntheticName, Type.Calculated, 
pos) + else + annot + .argumentConstantString(0) + .map((_, Type.Provided, pos)) + .orElse { + report.error( + "Name used to resolve link-time property needs to be non-null literal constant", + tree.sourcePos + ) + None + } + } + } + } + } } diff --git a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/PrepNativeInterop.scala b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/PrepNativeInterop.scala index cda0200390..cee19ae276 100644 --- a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/PrepNativeInterop.scala +++ b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/PrepNativeInterop.scala @@ -53,6 +53,10 @@ class PrepNativeInterop extends PluginPhase { def isExternType(using Context): Boolean = (isScalaModule || sym.isTraitOrInterface) && sym.hasAnnotation(defnNir.ExternClass) + + def isExported(using Context) = + sym.hasAnnotation(defnNir.ExportedClass) || + sym.hasAnnotation(defnNir.ExportAccessorsClass) end extension override def transformDefDef(dd: DefDef)(using Context): Tree = { @@ -63,6 +67,12 @@ class PrepNativeInterop extends PluginPhase { sym.addAnnotation(defnNir.ExternClass) } + if sym.is(Inline) then + if sym.isExtern then + report.error("Extern method cannot be inlined", dd.srcPos) + else if sym.isExported then + report.error("Exported method cannot be inlined", dd.srcPos) + def usesVariadicArgs = sym.paramInfo.stripPoly match { case MethodTpe(paramNames, paramTypes, _) => paramTypes.exists(param => param.isRepeatedParam) @@ -103,6 +113,9 @@ class PrepNativeInterop extends PluginPhase { } } + if sym.is(Inline) && sym.isExported + then report.error("Exported field cannot be inlined", vd.srcPos) + vd } } diff --git a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/PrepNativeInteropLate.scala b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/PrepNativeInteropLate.scala new file mode 100644 index 0000000000..357216571c --- /dev/null +++ 
b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/PrepNativeInteropLate.scala @@ -0,0 +1,34 @@ +package scala.scalanative.nscplugin + +import dotty.tools.dotc.plugins.PluginPhase +import dotty.tools._ +import dotc._ +import dotc.ast.tpd._ +import dotc.transform.SymUtils.setter +import core.Contexts._ +import core.Definitions +import core.Names._ +import core.Symbols._ +import core.Types._ +import core.StdNames._ +import core.Constants.Constant +import NirGenUtil.ContextCached +import dotty.tools.dotc.core.Flags + +/** This phase does: + * - handle TypeApply -> Apply conversion for intrinsic methods + */ +object PostInlineNativeInterop { + val name = "scalanative-prepareInterop-postinline" +} + +class PostInlineNativeInterop extends PluginPhase { + override val runsAfter = Set(transform.Inlining.name, PrepNativeInterop.name) + override val runsBefore = Set(transform.FirstTransform.name) + val phaseName = PostInlineNativeInterop.name + override def description: String = "prepare ASTs for Native interop" + + def defn(using Context): Definitions = ctx.definitions + def defnNir(using Context): NirDefinitions = NirDefinitions.get + +} diff --git a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/ScalaNativePlugin.scala b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/ScalaNativePlugin.scala index 15b2a1b297..c16f7b21bd 100644 --- a/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/ScalaNativePlugin.scala +++ b/nscplugin/src/main/scala-3/scala/scalanative/nscplugin/ScalaNativePlugin.scala @@ -51,5 +51,5 @@ class ScalaNativePlugin extends StandardPlugin: } case (config, _) => config } - List(PrepNativeInterop(), GenNIR(genNirSettings)) + List(PrepNativeInterop(), PostInlineNativeInterop(), GenNIR(genNirSettings)) } diff --git a/posixlib/src/main/scala/scala/scalanative/posix/sys/uio.scala b/posixlib/src/main/scala/scala/scalanative/posix/sys/uio.scala index f3cdd81a4a..fa3c051a41 100644 --- 
a/posixlib/src/main/scala/scala/scalanative/posix/sys/uio.scala +++ b/posixlib/src/main/scala/scala/scalanative/posix/sys/uio.scala @@ -18,3 +18,21 @@ object uio { def writev(fildes: CInt, iov: Ptr[iovec], iovcnt: CInt): CSSize = extern } + +object uioOps { + import uio.iovec + + implicit class iovecOps(val ptr: Ptr[iovec]) extends AnyVal { + def iov_base: Ptr[Byte] = ptr._1 + def iov_len: CSize = ptr._2 + def iov_base_=(v: Ptr[Byte]): Unit = ptr._1 = v + def iov_len_=(v: CSize): Unit = ptr._2 = v + } + + implicit class iovecValOps(val vec: iovec) extends AnyVal { + def iov_base: Ptr[Byte] = vec._1 + def iov_len: CSize = vec._2 + def iov_base_=(v: Ptr[Byte]): Unit = vec._1 = v + def iov_len_=(v: CSize): Unit = vec._2 = v + } +} diff --git a/project/Build.scala b/project/Build.scala index 1a01460b51..0757fda345 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -9,6 +9,10 @@ import java.io.File.pathSeparator import sbtbuildinfo.BuildInfoPlugin import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._ +import pl.project13.scala.sbt.JmhPlugin +import JmhPlugin.JmhKeys._ +import sbtbuildinfo._ +import sbtbuildinfo.BuildInfoKeys._ import scala.scalanative.sbtplugin.ScalaNativePlugin.autoImport._ import com.jsuereth.sbtpgp.PgpKeys.publishSigned import scala.scalanative.build._ @@ -35,7 +39,8 @@ object Build { junitTestOutputsJVM, junitTestOutputsNative, tests, testsJVM, testsExt, testsExtJVM, sandbox, scalaPartest, scalaPartestRuntime, - scalaPartestTests, scalaPartestJunitTests + scalaPartestTests, scalaPartestJunitTests, + toolsBenchmarks ) // format: on lazy val allMultiScalaProjects = @@ -177,6 +182,38 @@ object Build { } .dependsOn(nir, util, testingCompilerInterface % "test") + lazy val toolsBenchmarks = + MultiScalaProject("toolsBenchmarks", file("tools-benchmarks")) + .enablePlugins(JmhPlugin, BuildInfoPlugin) + .dependsOn(tools % "compile->test") + .settings( + toolSettings, + noPublishSettings, + inConfig(Jmh)( + Def.settings( + 
sourceDirectory := (Compile / sourceDirectory).value, + classDirectory := (Compile / classDirectory).value, + dependencyClasspath := (Compile / dependencyClasspath).value, + compile := (Jmh / compile).dependsOn(Compile / compile).value, + run := (Jmh / run).dependsOn(Jmh / compile).evaluated + ) + ) + ) + .zippedSettings(Seq("tests")) { + case Seq(tests) => + Def.settings( + // Only generate build info for test configuration + // Compile / buildInfoObject := "TestSuiteBuildInfo", + Compile / buildInfoPackage := "scala.scalanative.benchmarks", + Compile / buildInfoKeys := List( + BuildInfoKey.map(tests / Test / fullClasspath) { + case (key, value) => + ("fullTestSuiteClasspath", value.toList.map(_.data)) + } + ) + ) + } + lazy val sbtScalaNative: Project = project .in(file("sbt-scala-native")) diff --git a/project/ScalaVersions.scala b/project/ScalaVersions.scala index 1094487697..5d8e8a1397 100644 --- a/project/ScalaVersions.scala +++ b/project/ScalaVersions.scala @@ -1,12 +1,29 @@ package build +/* Note to Contributors: + * Scala Native supports a number of Scala versions. These can be + * described as Major.Minor.Path. + * + * Support for Scala 2.12.lowest is provided by binary compatibility with + * Scala 2.12.highest. + * + * This means that Continuous Integration (CI) is run using + * the highest patch version. Scala Native may or may not build from + * from scratch when using lower patch versions. + * + * This information can save time and frustration when preparing + * contributions for submission: Build privately using highest, + * not lowest, patch version. 
+ */ + object ScalaVersions { // Versions of Scala used for publishing compiler plugins - val crossScala212 = (13 to 17).map(v => s"2.12.$v") - val crossScala213 = (4 to 10).map(v => s"2.13.$v") + val crossScala212 = (13 to 18).map(v => s"2.12.$v") + val crossScala213 = (4 to 11).map(v => s"2.13.$v") val crossScala3 = List( (0 to 3).map(v => s"3.1.$v"), - (0 to 2).map(v => s"3.2.$v") + (0 to 2).map(v => s"3.2.$v"), + (0 to 0).map(v => s"3.3.$v") ).flatten // Version of Scala 3 standard library sources used for publishing diff --git a/project/Settings.scala b/project/Settings.scala index 0f2bfa8518..6834555d62 100644 --- a/project/Settings.scala +++ b/project/Settings.scala @@ -452,9 +452,20 @@ object Settings { Test / unmanagedSourceDirectories ++= { val testsRootDir = baseDirectory.value.getParentFile.getParentFile val sharedTestDir = testsRootDir / "shared/src/test" + val scalaVersionDir = CrossVersion + .partialVersion(scalaVersion.value) + .collect { + case (3, _) => "scala3" + case (2, minor) => s"scala2.$minor" + } + .getOrElse(sys.error("Unsupported Scala version")) // Java 8 is reference so start at 9 - (9 to (Global / javaVersion).value).map { v => - sharedTestDir / s"require-jdk$v" + (9 to (Global / javaVersion).value).flatMap { v => + val jdkVersion = s"jdk$v" + Seq( + sharedTestDir / s"require-$jdkVersion", + sharedTestDir / s"require-$scalaVersionDir-$jdkVersion" + ) } } ) diff --git a/project/build.properties b/project/build.properties index f344c14837..ef3d26620e 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.8.2 +sbt.version = 1.8.3 diff --git a/project/build.sbt b/project/build.sbt index 3aa99c2689..f161c05205 100644 --- a/project/build.sbt +++ b/project/build.sbt @@ -15,6 +15,7 @@ addSbtPlugin("org.portable-scala" % "sbt-platform-deps" % "1.0.1") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % 
"sbt-pgp" % "2.2.0") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.4") libraryDependencies += "org.eclipse.jgit" % "org.eclipse.jgit.pgm" % "5.10.0.202012080955-r" diff --git a/scala-partest-junit-tests/src/test/resources/2.12.18/BlacklistedTests.txt b/scala-partest-junit-tests/src/test/resources/2.12.18/BlacklistedTests.txt new file mode 100644 index 0000000000..3325688a06 --- /dev/null +++ b/scala-partest-junit-tests/src/test/resources/2.12.18/BlacklistedTests.txt @@ -0,0 +1,199 @@ +## Do not compile +scala/lang/annotations/BytecodeTest.scala +scala/lang/annotations/RunTest.scala +scala/lang/traits/BytecodeTest.scala +scala/lang/traits/RunTest.scala +scala/lang/primitives/NaNTest.scala +scala/lang/primitives/BoxUnboxTest.scala +scala/lang/stringinterpol/StringContextTest.scala +scala/collection/SeqTest.scala +scala/collection/Sizes.scala +scala/collection/SetMapConsistencyTest.scala +scala/collection/mutable/OpenHashMapTest.scala +scala/collection/immutable/ListTest.scala +scala/collection/immutable/ListMapTest.scala +scala/collection/immutable/HashMapTest.scala +scala/collection/immutable/HashSetTest.scala +scala/collection/immutable/MapHashcodeTest.scala +scala/collection/immutable/SetTest.scala +scala/collection/immutable/SeqTest.scala +scala/collection/immutable/SmallMapTest.scala +scala/collection/immutable/SortedMapTest.scala +scala/collection/immutable/SortedSetTest.scala +scala/collection/immutable/TreeMapTest.scala +scala/collection/immutable/TreeSetTest.scala +scala/reflect/ClassOfTest.scala +scala/reflect/QTest.scala +scala/reflect/io/AbstractFileTest.scala +scala/reflect/io/ZipArchiveTest.scala +scala/reflect/internal/util/AbstractFileClassLoaderTest.scala +scala/reflect/internal/util/FileUtilsTest.scala +scala/reflect/internal/util/SourceFileTest.scala +scala/reflect/internal/util/StringOpsTest.scala +scala/reflect/internal/util/WeakHashSetTest.scala +scala/reflect/internal/LongNamesTest.scala +scala/reflect/internal/MirrorsTest.scala 
+scala/reflect/internal/NamesTest.scala +scala/reflect/internal/PositionsTest.scala +scala/reflect/internal/PrintersTest.scala +scala/reflect/internal/ScopeTest.scala +scala/reflect/internal/TreeGenTest.scala +scala/reflect/internal/TypesTest.scala +scala/reflect/runtime/ReflectionUtilsShowTest.scala +scala/reflect/runtime/ThreadSafetyTest.scala +scala/tools/cmd/CommandLineParserTest.scala +scala/tools/nsc/Build.scala +scala/tools/nsc/DeterminismTest.scala +scala/tools/nsc/DeterminismTester.scala +scala/tools/nsc/FileUtils.scala +scala/tools/nsc/GlobalCustomizeClassloaderTest.scala +scala/tools/nsc/PickleWriteTest.scala +scala/tools/nsc/PipelineMainTest.scala +scala/tools/nsc/async/AnnotationDrivenAsync.scala +scala/tools/nsc/async/CustomFuture.scala +scala/tools/nsc/backend/jvm/PerRunInitTest.scala +scala/tools/nsc/backend/jvm/BTypesTest.scala +scala/tools/nsc/backend/jvm/BytecodeTest.scala +scala/tools/nsc/backend/jvm/DefaultMethodTest.scala +scala/tools/nsc/backend/jvm/DirectCompileTest.scala +scala/tools/nsc/backend/jvm/GenericSignaturesTest.scala +scala/tools/nsc/backend/jvm/IndyLambdaTest.scala +scala/tools/nsc/backend/jvm/IndySammyTest.scala +scala/tools/nsc/backend/jvm/InnerClassAttributeTest.scala +scala/tools/nsc/backend/jvm/NestedClassesCollectorTest.scala +scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala +scala/tools/nsc/backend/jvm/StringConcatTest.scala +scala/tools/nsc/backend/jvm/IndyLambdaDirectTest.scala +scala/tools/nsc/backend/jvm/LineNumberTest.scala +scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala +scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala +scala/tools/nsc/backend/jvm/opt/BoxUnboxTest.scala +scala/tools/nsc/backend/jvm/opt/BoxUnboxAndInlineTest.scala +scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala +scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala 
+scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala +scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala +scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala +scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala +scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala +scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +scala/tools/nsc/backend/jvm/opt/InlineSourceMatcherTest.scala +scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala +scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala +scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala +scala/tools/nsc/ScriptRunnerTest.scala +scala/tools/nsc/classpath/AggregateClassPathTest.scala +scala/tools/nsc/classpath/JrtClassPathTest.scala +scala/tools/nsc/classpath/MultiReleaseJarTest.scala +scala/tools/nsc/classpath/PathResolverBaseTest.scala +scala/tools/nsc/classpath/VirtualDirectoryClassPathTest.scala +scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala +scala/tools/nsc/doc/html/HtmlDocletTest.scala +scala/tools/nsc/interpreter/CompletionTest.scala +scala/tools/nsc/interpreter/ScriptedTest.scala +scala/tools/nsc/interpreter/TabulatorTest.scala +scala/tools/nsc/parser/ParserTest.scala +scala/tools/nsc/reporters/ConsoleReporterTest.scala +scala/tools/nsc/reporters/WConfTest.scala +scala/tools/nsc/settings/ScalaVersionTest.scala +scala/tools/nsc/settings/SettingsTest.scala +scala/tools/nsc/settings/TargetTest.scala +scala/tools/nsc/symtab/CannotHaveAttrsTest.scala +scala/tools/nsc/symtab/FlagsTest.scala +scala/tools/nsc/symtab/FreshNameExtractorTest.scala +scala/tools/nsc/symtab/StdNamesTest.scala +scala/tools/nsc/symtab/SymbolLoadersAssociatedFileTest.scala 
+scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala +scala/tools/nsc/symtab/SymbolTableTest.scala +scala/tools/nsc/symtab/classfile/PicklerTest.scala +scala/tools/nsc/transform/MixinTest.scala +scala/tools/nsc/transform/SpecializationTest.scala +scala/tools/nsc/transform/ThicketTransformerTest.scala +scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala +scala/tools/nsc/transform/patmat/SolvingTest.scala +scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala +scala/tools/nsc/typechecker/Implicits.scala +scala/tools/nsc/typechecker/NamerTest.scala +scala/tools/nsc/typechecker/ParamAliasTest.scala +scala/tools/nsc/typechecker/TypedTreeTest.scala +scala/tools/nsc/typechecker/TreeAttachmentTest.scala +scala/tools/nsc/util/StackTraceTest.scala +scala/tools/testing/AllocationTest.scala +scala/tools/testing/BytecodeTesting.scala +scala/tools/testing/JOL.scala +scala/tools/testing/RunTesting.scala +scala/tools/testing/VirtualCompilerTesting.scala +scala/runtime/BooleanBoxingTest.scala +scala/runtime/ByteBoxingTest.scala +scala/runtime/CharBoxingTest.scala +scala/runtime/ShortBoxingTest.scala +scala/runtime/IntBoxingTest.scala +scala/runtime/LongBoxingTest.scala +scala/runtime/DoubleBoxingTest.scala +scala/runtime/FloatBoxingTest.scala + +#============== +## Do not link +# Defines stubs +scala/collection/mutable/AnyRefMapTest.scala + + +#j.l.reflect.Modifier +scala/reflect/macros/AttachmentsTest.scala +scala/collection/IteratorTest.scala +scala/collection/immutable/StringLikeTest.scala +scala/collection/immutable/VectorTest.scala +scala/collection/immutable/ListSetTest.scala +scala/collection/mutable/MutableListTest.scala +scala/collection/mutable/ArrayBufferTest.scala +scala/concurrent/FutureTest.scala +scala/util/SpecVersionTest.scala +scala/tools/testing/AssertUtil.scala +scala/tools/testing/AssertUtilTest.scala +scala/tools/testing/AssertThrowsTest.scala + +#j.i.ObjectStream +scala/PartialFunctionSerializationTest.scala 
+scala/MatchErrorSerializationTest.scala +scala/concurrent/duration/SerializationTest.scala +scala/collection/convert/WrapperSerializationTest.scala +scala/collection/immutable/RedBlackTreeSerialFormat.scala +scala/collection/mutable/PriorityQueueTest.scala + +#j.io.Piped{Input,Output}Stream +#j.u.c.LinkedBlockingQueue +scala/sys/process/PipedProcessTest.scala + +#j.u.c.ConcurrentHashMap +scala/collection/convert/NullSafetyToScalaTest.scala +scala/collection/convert/NullSafetyToJavaTest.scala + +# Concurrency primitives +scala/collection/SetMapRulesTest.scala +scala/collection/ParallelConsistencyTest.scala +scala/collection/parallel/immutable/ParRangeTest.scala +scala/collection/concurrent/TrieMapTest.scala +scala/runtime/ScalaRunTimeTest.scala +scala/io/SourceTest.scala +scala/sys/process/ProcessTest.scala + +#============ +## Tests fail + +scala/collection/immutable/StreamTest.scala + +### Deadlocks maybe needs j.u.c.ConcurrentLinkedQueue +scala/concurrent/impl/DefaultPromiseTest.scala +scala/collection/parallel/TaskTest.scala +scala/collection/NewBuilderTest.scala + +#===== +## Assumes JUnit 4.12 +scala/collection/immutable/RangeTest.scala +scala/util/matching/RegexTest.scala \ No newline at end of file diff --git a/scala-partest-junit-tests/src/test/resources/2.13.11/BlacklistedTests-require-threads.txt b/scala-partest-junit-tests/src/test/resources/2.13.11/BlacklistedTests-require-threads.txt new file mode 100644 index 0000000000..bf57d81f24 --- /dev/null +++ b/scala-partest-junit-tests/src/test/resources/2.13.11/BlacklistedTests-require-threads.txt @@ -0,0 +1,2 @@ +scala/collection/convert/MapWrapperTest.scala +scala/collection/concurrent/TrieMapTest.scala \ No newline at end of file diff --git a/scala-partest-junit-tests/src/test/resources/2.13.11/BlacklistedTests.txt b/scala-partest-junit-tests/src/test/resources/2.13.11/BlacklistedTests.txt new file mode 100644 index 0000000000..a19ddc1379 --- /dev/null +++ 
b/scala-partest-junit-tests/src/test/resources/2.13.11/BlacklistedTests.txt @@ -0,0 +1,240 @@ +## Do not compile +scala/ExtractorTest.scala +scala/OptionTest.scala +scala/SerializationStabilityTest.scala +scala/StringTest.scala +scala/collection/FactoriesTest.scala +scala/collection/LazyZipOpsTest.scala +scala/collection/SeqTest.scala +scala/collection/immutable/HashMapTest.scala +scala/collection/immutable/HashSetTest.scala +scala/collection/immutable/IndexedSeqTest.scala +scala/collection/immutable/IntMapTest.scala +scala/collection/immutable/ListMapTest.scala +scala/collection/immutable/LongMapTest.scala +scala/collection/immutable/MapHashcodeTest.scala +scala/collection/immutable/SeqTest.scala +scala/collection/immutable/SmallMapTest.scala +scala/collection/immutable/SortedMapTest.scala +scala/collection/immutable/SortedSetTest.scala +scala/collection/immutable/TreeMapTest.scala +scala/collection/immutable/TreeSetTest.scala +scala/collection/mutable/ArrayBufferTest.scala +scala/lang/annotations/BytecodeTest.scala +scala/lang/annotations/RunTest.scala +scala/lang/traits/BytecodeTest.scala +scala/lang/traits/RunTest.scala +scala/lang/primitives/NaNTest.scala +scala/math/PartialOrderingTest.scala +scala/reflect/ClassOfTest.scala +scala/reflect/FieldAccessTest.scala +scala/reflect/QTest.scala +scala/reflect/io/ZipArchiveTest.scala +scala/reflect/internal/InferTest.scala +scala/reflect/internal/LongNamesTest.scala +scala/reflect/internal/MirrorsTest.scala +scala/reflect/internal/NamesTest.scala +scala/reflect/internal/PositionsTest.scala +scala/reflect/internal/PrintersTest.scala +scala/reflect/internal/ScopeTest.scala +scala/reflect/internal/TreeGenTest.scala +scala/reflect/internal/TypesTest.scala +scala/reflect/internal/util/AbstractFileClassLoaderTest.scala +scala/reflect/internal/util/FileUtilsTest.scala +scala/reflect/internal/util/SourceFileTest.scala +scala/reflect/internal/util/StringOpsTest.scala +scala/reflect/internal/SubstMapTest.scala 
+scala/reflect/internal/util/WeakHashSetTest.scala +scala/reflect/io/AbstractFileTest.scala +scala/reflect/runtime/ThreadSafetyTest.scala +scala/reflect/runtime/ReflectionUtilsShowTest.scala +scala/tools/nsc/Build.scala +scala/tools/nsc/DeterminismTest.scala +scala/tools/nsc/DeterminismTester.scala +scala/tools/nsc/FileUtils.scala +scala/tools/nsc/GlobalCustomizeClassloaderTest.scala +scala/tools/nsc/PhaseAssemblyTest.scala +scala/tools/nsc/PickleWriteTest.scala +scala/tools/nsc/PipelineMainTest.scala +scala/tools/nsc/ScriptRunnerTest.scala +scala/tools/nsc/MainRunnerTest.scala +scala/tools/nsc/async/AnnotationDrivenAsyncTest.scala +scala/tools/nsc/async/CustomFuture.scala +scala/tools/nsc/backend/jvm/BTypesTest.scala +scala/tools/nsc/backend/jvm/BytecodeTest.scala +scala/tools/nsc/backend/jvm/DefaultMethodTest.scala +scala/tools/nsc/backend/jvm/DirectCompileTest.scala +scala/tools/nsc/backend/jvm/GenericSignaturesTest.scala +scala/tools/nsc/backend/jvm/IndyLambdaTest.scala +scala/tools/nsc/backend/jvm/IndySammyTest.scala +scala/tools/nsc/backend/jvm/InnerClassAttributeTest.scala +scala/tools/nsc/backend/jvm/LineNumberTest.scala +scala/tools/nsc/backend/jvm/NestedClassesCollectorTest.scala +scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala +scala/tools/nsc/backend/jvm/PerRunInitTest.scala +scala/tools/nsc/backend/jvm/StringConcatTest.scala +scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala +scala/tools/nsc/backend/jvm/analysis/TypeFlowAnalyzerTest.scala +scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala +scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala +scala/tools/nsc/backend/jvm/opt/BoxUnboxTest.scala +scala/tools/nsc/backend/jvm/opt/BoxUnboxAndInlineTest.scala +scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala +scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala 
+scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala +scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala +scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala +scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala +scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +scala/tools/nsc/backend/jvm/opt/InlineSourceMatcherTest.scala +scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala +scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala +scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala +scala/tools/nsc/classpath/AggregateClassPathTest.scala +scala/tools/nsc/classpath/JrtClassPathTest.scala +scala/tools/nsc/classpath/MultiReleaseJarTest.scala +scala/tools/nsc/classpath/PathResolverBaseTest.scala +scala/tools/nsc/classpath/VirtualDirectoryClassPathTest.scala +scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala +scala/tools/nsc/doc/html/HtmlDocletTest.scala +scala/tools/nsc/doc/html/StringLiteralTest.scala +scala/tools/nsc/interpreter/CompletionTest.scala +scala/tools/nsc/interpreter/ScriptedTest.scala +scala/tools/nsc/interpreter/TabulatorTest.scala +scala/tools/nsc/parser/ParserTest.scala +scala/tools/nsc/reporters/ConsoleReporterTest.scala +scala/tools/nsc/reporters/PositionFilterTest.scala +scala/tools/nsc/reporters/WConfTest.scala +scala/tools/nsc/settings/ScalaVersionTest.scala +scala/tools/nsc/settings/SettingsTest.scala +scala/tools/nsc/settings/TargetTest.scala +scala/tools/nsc/symtab/CannotHaveAttrsTest.scala +scala/tools/nsc/symtab/FlagsTest.scala +scala/tools/nsc/symtab/FreshNameExtractorTest.scala +scala/tools/nsc/symtab/StdNamesTest.scala +scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala +scala/tools/nsc/symtab/SymbolTableTest.scala 
+scala/tools/nsc/symtab/classfile/PicklerTest.scala +scala/tools/nsc/transform/ErasureTest.scala +scala/tools/nsc/transform/MixinTest.scala +scala/tools/nsc/transform/ReleaseFenceTest.scala +scala/tools/nsc/transform/SpecializationTest.scala +scala/tools/nsc/transform/ThicketTransformerTest.scala +scala/tools/nsc/transform/UncurryTest.scala +scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala +scala/tools/nsc/transform/patmat/SolvingTest.scala +scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala +scala/tools/nsc/typechecker/ConstantFolderTest.scala +scala/tools/nsc/typechecker/ImplicitsTest.scala +scala/tools/nsc/typechecker/InferencerTest.scala +scala/tools/nsc/typechecker/NamerTest.scala +scala/tools/nsc/typechecker/OverridingPairsTest.scala +scala/tools/nsc/typechecker/ParamAliasTest.scala +scala/tools/nsc/typechecker/TypedTreeTest.scala +scala/tools/nsc/typechecker/TreeAttachmentTest.scala +scala/tools/nsc/util/StackTraceTest.scala +scala/util/ChainingOpsTest.scala +scala/sys/process/ProcessTest.scala +scala/collection/mutable/OpenHashMapTest.scala +scala/collection/immutable/ListTest.scala +scala/collection/immutable/LazyListTest.scala +scala/collection/Sizes.scala +scala/runtime/BooleanBoxingTest.scala +scala/runtime/ByteBoxingTest.scala +scala/runtime/CharBoxingTest.scala +scala/runtime/ShortBoxingTest.scala +scala/runtime/IntBoxingTest.scala +scala/runtime/LongBoxingTest.scala +scala/runtime/FloatBoxingTest.scala +scala/runtime/DoubleBoxingTest.scala +scala/collection/concurrent/ConcurrentMapTester.scala +scala/collection/concurrent/ConcurrentMapTestHelper.scala +scala/collection/concurrent/TrieMapTest.scala +scala/collection/convert/MapWrapperTest.scala +scala/collection/SetMapRulesTest.scala +scala/collection/IterableTest.scala + +## Do not link +scala/jdk/DurationConvertersTest.scala +scala/jdk/OptionConvertersTest.scala +scala/jdk/StreamConvertersTest.scala +scala/jdk/StreamConvertersTypingTest.scala + +# Uses stubs 
+scala/collection/mutable/AnyRefMapTest.scala +scala/collection/mutable/ListBufferTest.scala +scala/collection/immutable/ChampMapSmokeTest.scala +scala/collection/immutable/ChampSetSmokeTest.scala +scala/sys/process/ProcessBuilderTest.scala + +#j.l.reflect.Modifier / testkit.AssertUtil +scala/reflect/macros/AttachmentsTest.scala +scala/collection/IteratorTest.scala +scala/collection/immutable/StringLikeTest.scala +scala/concurrent/FutureTest.scala +scala/util/SpecVersionTest.scala +scala/tools/testkit/AssertUtilTest.scala +scala/tools/testkit/ReflectUtilTest.scala + +#j.u.stream.* +scala/jdk/StepperConversionTest.scala +scala/jdk/StepperTest.scala + +#j.i.Object{Input,Output}Stream +scala/PartialFunctionSerializationTest.scala +scala/MatchErrorSerializationTest.scala +scala/collection/convert/WrapperSerializationTest.scala +scala/collection/mutable/PriorityQueueTest.scala +scala/collection/mutable/SerializationTest.scala +scala/collection/immutable/SerializationTest.scala +scala/collection/immutable/LazyListLazinessTest.scala +scala/concurrent/duration/SerializationTest.scala +scala/jdk/FunctionConvertersTest.scala + +#j.io.Piped{Input,Output}Stream / j.u.c.LinkedBlockingQueue +scala/sys/process/PipedProcessTest.scala + +#j.u.c.ConcurrentHashMap +scala/collection/convert/NullSafetyToScalaTest.scala +scala/collection/convert/NullSafetyToJavaTest.scala +scala/collection/convert/CollectionConvertersTest.scala +scala/collection/convert/JConcurrentMapWrapperTest.scala + +#j.t.LocalDate +scala/math/OrderingTest.scala + +#j.l.Class.getDeclaredField +scala/collection/immutable/VectorTest.scala + +#j.l.Thread contextClassloader +scala/io/SourceTest.scala +scala/lang/stringinterpol/StringContextTest.scala + +# Needs newer JUnit version +scala/util/matching/RegexTest.scala +scala/collection/immutable/RangeTest.scala +scala/collection/mutable/BitSetTest.scala + +## Tests fail +scala/ArrayTest.scala +scala/collection/ArrayOpsTest.scala +scala/collection/StringParsersTest.scala 
+scala/collection/StringOpsTest.scala +scala/collection/convert/JSetWrapperTest.scala +scala/collection/immutable/ArraySeqTest.scala +scala/collection/immutable/LazyListGCTest.scala +scala/collection/immutable/StreamTest.scala +scala/collection/immutable/VectorTest.scala +scala/math/EquivTest.scala +scala/sys/process/ParserTest.scala +scala/util/TryTest.scala +# https://github.com/scala-native/scala-native/issues/2897 +scala/math/BigIntTest.scala +### deadlocks maybe needs j.u.c.ConcurrentLinkedQueue +scala/concurrent/impl/DefaultPromiseTest.scala \ No newline at end of file diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/BlacklistedTests.txt b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/BlacklistedTests.txt new file mode 100644 index 0000000000..a90ea54972 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/BlacklistedTests.txt @@ -0,0 +1,1089 @@ +# Ported from Scala.js, might not be exhaustive enough (some blacklisted tests may actually work in SN) + +# +# POS +# + +# Spuriously fails too often, and causes other subsequent tests to fail too +# Note that this test, by design, stress-tests type checking +pos/t6367.scala + +# +# NEG +# + +# Uses .java files +run/t9200 +run/noInlineUnknownIndy + +# +# RUN +# + +# Tests that ClassTags are cached, which we do not do in Scala.js +# (our ClassTags are better stack-allocated than cached) +run/classtags-cached.scala + +# Relies on the exact toString() representation of Floats/Doubles +run/t2378.scala + +# Using parts of the javalib we don't plan to support + +run/t5018.scala +run/t2417.scala +run/lazy-concurrent.scala +run/t3667.scala +run/t3038d.scala +run/shutdownhooks.scala +run/t5590.scala +run/t3895b.scala +run/t5974.scala +run/t5262.scala +run/serialize-stream.scala +run/lambda-serialization-gc.scala +run/t9390.scala +run/t9390b.scala +run/t9390c.scala +run/trait-defaults-super.scala 
+run/t2849.scala +run/t10488.scala +run/various-flat-classpath-types.scala + +# Uses j.l.Class stubs +run/t12002.scala +run/t5676.scala + +# Uses java.math.BigDecimal / BigInteger : but failures not due to them +run/is-valid-num.scala + +# Documented semantic difference on String.split(x: Array[Char]) +run/t0325.scala + +# Using Threads +run/inner-obj-auto.scala +run/predef-cycle.scala +run/synchronized.scala +run/sd409.scala + +# Uses java.security +run/t2318.scala + +# Tries to catch java.lang.StackOverflowError +run/t6154.scala + +# Tries to catch java.lang.OutOfMemoryError +run/t7880.scala + +# Requires too much memory (on the JVM, extra memory is given to this test) +run/t11272.scala + +# Taking too much time >60sec + +run/t3989.scala +run/t6253a.scala +run/t6253b.scala +run/t6253c.scala +run/numbereq.scala + +# Using partest properties +run/tailcalls.scala +run/t4294.scala + +# Using IO + +run/t6488.scala +run/t6988.scala + +# Object{Output|Input}Streams +run/defaults-serizaliable-no-forwarders.scala +run/defaults-serizaliable-with-forwarders.scala +run/lambda-serialization-meth-ref.scala +run/red-black-tree-serial +run/red-black-tree-serial-new +run/t6935.scala +run/t8188.scala +run/t9375.scala +run/t9365.scala +run/inlineAddDeserializeLambda.scala +run/sammy_seriazable.scala +run/lambda-serialization-security.scala +run/t10232.scala +run/t10233.scala +run/t10244.scala +run/t10522.scala +run/t11255 +run/transient-object.scala + +# Using System.getProperties + +run/t4426.scala + +# Using Await + +run/t7336.scala +run/t7775.scala +run/t10513.scala +run/future-flatmap-exec-count.scala + +# Using detailed stack trace + +run/t6308.scala + +# Using reflection +run/t6063 + +run/mixin-bridge-methods.scala +run/t5125.scala +run/outertest.scala +run/t6223.scala +run/t5652b +run/elidable-opt.scala +run/nullable-lazyvals.scala +run/t4794.scala +run/t5652 +run/t5652c +run/getClassTest-old.scala +run/t8960.scala +run/t7965.scala +run/t8087.scala +run/t8931.scala 
+run/t8445.scala +run/t12038a +run/t12038b +run/lambda-serialization.scala + +run/reflection-repl-classes.scala +run/t5256e.scala +run/typetags_core.scala +run/reflection-constructormirror-toplevel-badpath.scala +run/t5276_1b.scala +run/reflection-sorted-decls.scala +run/toolbox_typecheck_implicitsdisabled.scala +run/t5418b.scala +run/toolbox_typecheck_macrosdisabled2.scala +run/abstypetags_serialize.scala +run/all-overridden.scala +run/showraw_tree_kinds.scala +run/showraw_tree_types_ids.scala +run/showraw_tree_types_typed.scala +run/showraw_tree_ids.scala +run/showraw_tree_ultimate.scala +run/t5266_2.scala +run/t5274_1.scala +run/t5224.scala +run/reflection-sanitychecks.scala +run/t6086-vanilla.scala +run/t5277_2.scala +run/reflection-methodsymbol-params.scala +run/reflection-valueclasses-standard.scala +run/t5274_2.scala +run/t5423.scala +run/reflection-modulemirror-toplevel-good.scala +run/t5419.scala +run/t5271_3.scala +run/reflection-enclosed-nested-basic.scala +run/reflection-enclosed-nested-nested-basic.scala +run/fail-non-value-types.scala +run/exprs_serialize.scala +run/t5258a.scala +run/typetags_without_scala_reflect_manifest_lookup.scala +run/t4110-new.scala +run/t5273_2b_newpatmat.scala +run/t6277.scala +run/t5335.scala +run/toolbox_typecheck_macrosdisabled.scala +run/reflection-modulemirror-inner-good.scala +run/t5229_2.scala +run/typetags_multi.scala +run/typetags_without_scala_reflect_typetag_manifest_interop.scala +run/reflection-constructormirror-toplevel-good.scala +run/reflection-magicsymbols-invoke.scala +run/t6392b.scala +run/t5229_1.scala +run/reflection-magicsymbols-vanilla.scala +run/t5225_2.scala +run/runtimeEval1.scala +run/reflection-enclosed-nested-inner-basic.scala +run/reflection-fieldmirror-ctorparam.scala +run/t6181.scala +run/reflection-magicsymbols-repl.scala +run/t5272_2_newpatmat.scala +run/t5270.scala +run/t5418a.scala +run/t5276_2b.scala +run/t5256f.scala +run/reflection-enclosed-basic.scala 
+run/reflection-constructormirror-inner-badpath.scala +run/interop_typetags_are_manifests.scala +run/newTags.scala +run/t5273_1_newpatmat.scala +run/reflection-constructormirror-nested-good.scala +run/t2236-new.scala +run/existentials3-new.scala +run/t6323b.scala +run/t5943a1.scala +run/reflection-fieldmirror-getsetval.scala +run/t5272_1_oldpatmat.scala +run/t5256h.scala +run/t1195-new.scala +run/t5840.scala +run/reflection-methodsymbol-returntype.scala +run/reflection-fieldmirror-accessorsareokay.scala +run/reflection-sorted-members.scala +run/reflection-allmirrors-tostring.scala +run/valueclasses-typetag-existential.scala +run/toolbox_console_reporter.scala +run/reflection-enclosed-inner-inner-basic.scala +run/t5256b.scala +run/bytecodecs.scala +run/elidable.scala +run/freetypes_false_alarm1.scala +run/freetypes_false_alarm2.scala +run/getClassTest-new.scala +run/idempotency-extractors.scala +run/idempotency-case-classes.scala +run/idempotency-this.scala +run/idempotency-labels.scala +run/idempotency-lazy-vals.scala +run/interop_manifests_are_abstypetags.scala +run/interop_manifests_are_typetags.scala +run/abstypetags_core.scala +run/macro-reify-abstypetag-notypeparams +run/macro-reify-abstypetag-typeparams-tags +run/macro-reify-abstypetag-typeparams-notags +run/macro-reify-abstypetag-usetypetag +run/macro-reify-freevars +run/macro-reify-splice-outside-reify +run/macro-reify-tagless-a +run/macro-reify-type +run/macro-reify-typetag-typeparams-tags +run/macro-reify-typetag-notypeparams +run/macro-undetparams-implicitval +run/manifests-new.scala +run/manifests-old.scala +run/no-pickle-skolems +run/position-val-def.scala +run/reflect-priv-ctor.scala +run/primitive-sigs-2-new.scala +run/primitive-sigs-2-old.scala +run/reflection-enclosed-inner-basic.scala +run/reflection-enclosed-inner-nested-basic.scala +run/reflection-constructormirror-inner-good.scala +run/reflection-constructormirror-nested-badpath.scala +run/reflection-fancy-java-classes 
+run/reflection-fieldsymbol-navigation.scala +run/reflection-fieldmirror-nmelocalsuffixstring.scala +run/reflection-fieldmirror-getsetvar.scala +run/reflection-fieldmirror-privatethis.scala +run/reflection-implicit.scala +run/reflection-mem-glbs.scala +run/reflection-mem-tags.scala +run/reflection-java-annotations +run/reflection-java-crtp +run/reflection-methodsymbol-typeparams.scala +run/reflection-modulemirror-nested-badpath.scala +run/reflection-modulemirror-inner-badpath.scala +run/reflection-modulemirror-nested-good.scala +run/reflection-modulemirror-toplevel-badpath.scala +run/reflection-sync-subtypes.scala +run/reflinit.scala +run/reflection-valueclasses-derived.scala +run/reflection-valueclasses-magic.scala +run/resetattrs-this.scala +run/runtimeEval2.scala +run/showraw_aliases.scala +run/showraw_mods.scala +run/shortClass.scala +run/showraw_nosymbol.scala +run/showraw_tree.scala +run/showraw_tree_types_untyped.scala +run/t1167.scala +run/t2577.scala +run/t2873.scala +run/t2886.scala +run/t3346j.scala +run/t3507-new.scala +run/t3569.scala +run/t5125b.scala +run/t5225_1.scala +run/t3425b +run/t5256a.scala +run/t5230.scala +run/t5256c.scala +run/t5256g.scala +run/t5266_1.scala +run/t5269.scala +run/t5271_1.scala +run/t5271_2.scala +run/t5271_4.scala +run/t5272_1_newpatmat.scala +run/t5272_2_oldpatmat.scala +run/t5273_1_oldpatmat.scala +run/t5273_2a_newpatmat.scala +run/t5273_2a_oldpatmat.scala +run/t5275.scala +run/t5276_1a.scala +run/t5276_2a.scala +run/t5277_1.scala +run/t5279.scala +run/t5334_1.scala +run/t5334_2.scala +run/t5415.scala +run/t5418.scala +run/t5704.scala +run/t5710-1.scala +run/t5710-2.scala +run/t5770.scala +run/t5894.scala +run/t5816.scala +run/t5824.scala +run/t5912.scala +run/t5942.scala +run/t5943a2.scala +run/t6023.scala +run/t6113.scala +run/t6175.scala +run/t6178.scala +run/t6199-mirror.scala +run/t6199-toolbox.scala +run/t6240-universe-code-gen.scala +run/t6221 +run/t6260b.scala +run/t6259.scala +run/t6287.scala +run/t6344.scala 
+run/t6392a.scala +run/t6591_1.scala +run/t6591_2.scala +run/t6591_3.scala +run/t6591_5.scala +run/t6591_6.scala +run/t6591_7.scala +run/t6608.scala +run/t6677.scala +run/t6687.scala +run/t6715.scala +run/t6719.scala +run/t6793.scala +run/t6860.scala +run/t6793b.scala +run/t6793c.scala +run/t7045.scala +run/t7046.scala +run/t7008-scala-defined +run/t7120b.scala +run/t7151.scala +run/t7214.scala +run/t7235.scala +run/t7331a.scala +run/t7331b.scala +run/t7331c.scala +run/t7558.scala +run/t7556 +run/t7779.scala +run/t7868b.scala +run/toolbox_current_run_compiles.scala +run/toolbox_default_reporter_is_silent.scala +run/toolbox_parse_package.scala +run/toolbox_silent_reporter.scala +run/toolbox_typecheck_inferimplicitvalue.scala +run/typetags_serialize.scala +run/valueclasses-typetag-basic.scala +run/WeakHashSetTest.scala +run/valueclasses-typetag-generic.scala +run/t4023.scala +run/t4024.scala +run/t6380.scala +run/t5273_2b_oldpatmat.scala +run/t8104 +run/t8047.scala +run/t6992 +run/var-arity-class-symbol.scala +run/typetags_symbolof_x.scala +run/typecheck +run/t8190.scala +run/t8192 +run/t8177f.scala +run/t7932.scala +run/t7700.scala +run/t7570c.scala +run/t7570b.scala +run/t7533.scala +run/t7570a.scala +run/t7044 +run/t7328.scala +run/t6733.scala +run/t6554.scala +run/t6732.scala +run/t6379 +run/t6411b.scala +run/t6411a.scala +run/t6260c.scala +run/t6260-delambdafy.scala +run/showdecl +run/reflection-sync-potpourri.scala +run/reflection-tags.scala +run/reflection-companiontype.scala +run/reflection-scala-annotations.scala +run/reflection-idtc.scala +run/macro-reify-nested-b2 +run/mixin-signatures.scala +run/reflection-companion.scala +run/macro-reify-nested-b1 +run/macro-reify-nested-a2 +run/macro-reify-nested-a1 +run/macro-reify-chained2 +run/macro-reify-chained1 +run/inferred-type-constructors.scala +run/mirror_symbolof_x.scala +run/t8196.scala +run/t8549b.scala +run/t8574.scala +run/t8637.scala +run/t6622.scala +run/toolbox_expand_macro.scala +run/toolbox-varargs 
+run/t9252.scala +run/t9182.scala +run/t9102.scala +run/t720.scala +run/t9408.scala +run/t10527.scala +run/t10650 +run/trait-default-specialize.scala +run/lazy-locals-2.scala +run/t5294.scala +run/trait_fields_final.scala +run/trait_fields_bytecode.scala +run/trait_fields_volatile.scala +run/junitForwarders +run/reflect-java-param-names +run/t2251b.scala +run/t8253.scala +run/t9027.scala + +run/reify_classfileann_a.scala +run/reify_classfileann_b.scala +run/reify_newimpl_29.scala +run/reify_magicsymbols.scala +run/reify_inheritance.scala +run/reify_newimpl_12.scala +run/reify_typerefs_2b.scala +run/reify_csv.scala +run/reify_inner2.scala +run/reify_maps_oldpatmat.scala +run/reify_newimpl_43.scala +run/reify_nested_inner_refers_to_local.scala +run/reify_closure7.scala +run/reify_closure8b.scala +run/reify_typerefs_3b.scala +run/reify_newimpl_44.scala +run/reify_newimpl_06.scala +run/reify_newimpl_05.scala +run/reify_newimpl_20.scala +run/reify_newimpl_23.scala +run/reify_metalevel_breach_-1_refers_to_1.scala +run/reify_newimpl_41.scala +run/reify-repl-fail-gracefully.scala +run/reify_fors_oldpatmat.scala +run/reify_inner3.scala +run/reify_closure8a.scala +run/reify_closures10.scala +run/reify_ann2a.scala +run/reify_newimpl_51.scala +run/reify_newimpl_47.scala +run/reify_extendbuiltins.scala +run/reify_newimpl_30.scala +run/reify_newimpl_38.scala +run/reify_closure2a.scala +run/reify_newimpl_45.scala +run/reify_closure1.scala +run/reify_generic2.scala +run/reify_printf.scala +run/reify_closure6.scala +run/reify_newimpl_37.scala +run/reify_newimpl_35.scala +run/reify_typerefs_3a.scala +run/reify_newimpl_25.scala +run/reify_ann4.scala +run/reify_typerefs_1b.scala +run/reify_newimpl_22.scala +run/reify_this.scala +run/reify_typerefs_2a.scala +run/reify_newimpl_03.scala +run/reify_newimpl_48.scala +run/reify_varargs.scala +run/reify_newimpl_42.scala +run/reify_newimpl_15.scala +run/reify_nested_inner_refers_to_global.scala +run/reify_newimpl_02.scala 
+run/reify_newimpl_01.scala +run/reify_fors_newpatmat.scala +run/reify_nested_outer_refers_to_local.scala +run/reify_newimpl_13.scala +run/reify_closure5a.scala +run/reify_inner4.scala +run/reify_sort.scala +run/reify_ann1a.scala +run/reify_closure4a.scala +run/reify_newimpl_33.scala +run/reify_sort1.scala +run/reify_properties.scala +run/reify_generic.scala +run/reify_newimpl_27.scala +run/reify-aliases.scala +run/reify_ann3.scala +run/reify-staticXXX.scala +run/reify_ann1b.scala +run/reify_ann5.scala +run/reify_anonymous.scala +run/reify-each-node-type.scala +run/reify_copypaste2.scala +run/reify_closure3a.scala +run/reify_copypaste1.scala +run/reify_complex.scala +run/reify_for1.scala +run/reify_getter.scala +run/reify_implicits-new.scala +run/reify_inner1.scala +run/reify_implicits-old.scala +run/reify_lazyunit.scala +run/reify_lazyevaluation.scala +run/reify_maps_newpatmat.scala +run/reify_metalevel_breach_+0_refers_to_1.scala +run/reify_metalevel_breach_-1_refers_to_0_a.scala +run/reify_metalevel_breach_-1_refers_to_0_b.scala +run/reify_nested_outer_refers_to_global.scala +run/reify_newimpl_04.scala +run/reify_newimpl_14.scala +run/reify_newimpl_11.scala +run/reify_newimpl_18.scala +run/reify_newimpl_19.scala +run/reify_newimpl_31.scala +run/reify_newimpl_21.scala +run/reify_newimpl_36.scala +run/reify_newimpl_39.scala +run/reify_newimpl_40.scala +run/reify_newimpl_49.scala +run/reify_newimpl_50.scala +run/reify_newimpl_52.scala +run/reify_renamed_term_basic.scala +run/reify_renamed_term_local_to_reifee.scala +run/reify_renamed_term_overloaded_method.scala +run/reify_renamed_type_basic.scala +run/reify_renamed_type_local_to_reifee.scala +run/reify_renamed_type_spliceable.scala +run/reify_typerefs_1a.scala +run/reify_timeofday.scala +run/reify_renamed_term_t5841.scala + +run/t7521b.scala +run/t8575b.scala +run/t8575c.scala +run/t8944c.scala +run/t9535.scala +run/t9437a +run/t9814.scala +run/t10009.scala +run/t10075.scala +run/t10075b + +run/t8756.scala 
+run/inferred-type-constructors-hou.scala +run/trait-static-forwarder +run/SD-235.scala +run/t10026.scala +run/checkinit.scala +run/reflection-clinit +run/reflection-clinit-nested +run/t10487.scala + +run/typetags_caching.scala +run/type-tag-leak.scala +run/t10856.scala + +# Uses reflection indirectly through +# scala.runtime.ScalaRunTime.replStringOf +run/t6634.scala + +# Using reflection to invoke macros. These tests actually don't require +# or test reflection, but use it to separate compilation units nicely. +# It's a pity we cannot use them + +run/macro-abort-fresh +run/macro-expand-varargs-explicit-over-nonvarargs-bad +run/macro-invalidret-doesnt-conform-to-def-rettype +run/macro-invalidret-nontypeable +run/macro-invalidusage-badret +run/macro-invalidusage-partialapplication +run/macro-invalidusage-partialapplication-with-tparams +run/macro-reflective-ma-normal-mdmi +run/macro-reflective-mamd-normal-mi + +# Using macros, but indirectly creating calls to reflection +run/macro-reify-unreify + +# Using Enumeration in a way we cannot fix + +run/enums.scala +run/t3719.scala +run/t8611b.scala + +# Expecting exceptions that are linking errors in Scala.js (e.g. 
NoSuchMethodException) +run/t10334.scala + +# Playing with classfile format + +run/classfile-format-51.scala +run/classfile-format-52.scala + +# Concurrent collections (TrieMap) +# has too much stuff implemented in *.java, so no support +run/triemap-hash.scala + +# Using parallel collections +run/hashset.scala +run/t8549.scala +run/t5375.scala +run/t4894.scala +run/ctries-new +run/collection-conversions.scala +run/concurrent-map-conversions.scala +run/t4761.scala +run/t7498.scala +run/t6448.scala +run/ctries-old +run/map_java_conversions.scala +run/parmap-ops.scala +run/pc-conversions.scala +run/t4459.scala +run/t4608.scala +run/t4723.scala +run/t4895.scala +run/t6052.scala +run/t6410.scala +run/t6467.scala +run/t6908.scala +run/t8955.scala + +# Using scala.xml + +run/t4124.scala + +# Using Swing + +run/t3613.scala + +# Using the REPL + +run/t4285.scala +run/constant-type.scala +run/repl-bare-expr.scala +run/repl-parens.scala +run/repl-assign.scala +run/t5583.scala +run/treePrint.scala +run/constrained-types.scala +run/repl-power.scala +run/t4710.scala +run/repl-paste.scala +run/repl-reset.scala +run/repl-paste-3.scala +run/t6329_repl.scala +run/t6273.scala +run/repl-paste-2.scala +run/t5655.scala +run/t5072.scala +run/repl-colon-type.scala +run/repl-trim-stack-trace.scala +run/t4594-repl-settings.scala +run/repl-save.scala +run/repl-paste-raw.scala +run/repl-paste-4.scala +run/t7801.scala +run/repl-backticks.scala +run/t6633.scala +run/repl-inline.scala +run/repl-class-based-term-macros.scala +run/repl-always-use-instance.scala +run/repl-class-based-implicit-import.scala +run/repl-class-based-value-class.scala +run/repl-deadlock.scala +run/repl-class-based-outer-pointers.scala +run/repl-class-based-escaping-reads.scala + +# Using the Repl (scala.tools.partest.ReplTest) +run/class-symbol-contravariant.scala +run/lub-visibility.scala +run/macro-bundle-repl.scala +run/macro-repl-basic.scala +run/macro-repl-dontexpand.scala +run/macro-system-properties.scala 
+run/reflection-equality.scala +run/reflection-repl-elementary.scala +run/reify_newimpl_26.scala +run/repl-out-dir.scala +run/repl-term-macros.scala +run/repl-transcript.scala +run/repl-type-verbose.scala +run/t3376.scala +run/t4025.scala +run/t4172.scala +run/t4216.scala +run/t4542.scala +run/t4671.scala +run/t5256d.scala +run/t5535.scala +run/t5537.scala +run/t5789.scala +run/t6086-repl.scala +run/t6146b.scala +run/t6187.scala +run/t6320.scala +run/t6381.scala +run/t6434.scala +run/t6439.scala +run/t6507.scala +run/t6549.scala +run/t6937.scala +run/t7185.scala +run/t7319.scala +run/t7482a.scala +run/t7634.scala +run/t7747-repl.scala +run/t7805-repl-i.scala +run/tpeCache-tyconCache.scala +run/repl-empty-package +run/repl-javap-def.scala +run/repl-javap-mem.scala +run/repl-javap-outdir +run/repl-javap.scala +run/t6329_repl_bug.scala +run/t4950.scala +run/xMigration.scala +run/t6541-option.scala +run/repl-serialization.scala +run/t9174.scala +run/repl-paste-5.scala +run/repl-no-uescape.scala +run/repl-no-imports-no-predef-classbased.scala +run/repl-implicits-nopredef.scala +run/repl-classbased.scala +run/repl-no-imports-no-predef-power.scala +run/repl-paste-b.scala +run/repl-paste-6.scala +run/repl-implicits.scala +run/repl-no-imports-no-predef.scala +run/repl-paste-raw-b.scala +run/repl-paste-raw-c.scala +run/t9749-repl-dot.scala +run/trait_fields_repl.scala +run/t7139 +run/t9689 +run/trailing-commas.scala +run/t4700.scala +run/t9880-9881.scala +run/repl-kind.scala +run/t10284.scala +run/t9016.scala +run/repl-completions.scala +run/t10956.scala +run/t11564.scala +run/t11402.scala + +# Using Scala Script (partest.ScriptTest) + +run/t7711-script-args.scala +run/t4625.scala +run/t4625c.scala +run/t4625b.scala + +# Using the compiler API + +run/t2512.scala +run/analyzerPlugins.scala +run/compiler-asSeenFrom.scala +run/t5603.scala +run/t6440.scala +run/t5545.scala +run/existentials-in-compiler.scala +run/global-showdef.scala +run/stream_length.scala 
+run/annotatedRetyping.scala +run/imain.scala +run/existential-rangepos.scala +run/delambdafy_uncurry_byname_inline.scala +run/delambdafy_uncurry_byname_method.scala +run/delambdafy_uncurry_inline.scala +run/delambdafy_t6555.scala +run/delambdafy_uncurry_method.scala +run/delambdafy_t6028.scala +run/memberpos.scala +run/programmatic-main.scala +run/reflection-names.scala +run/settings-parse.scala +run/sm-interpolator.scala +run/t1501.scala +run/t1500.scala +run/sammy_java8.scala +run/t1618.scala +run/t2464 +run/t4072.scala +run/t5064.scala +run/t5385.scala +run/t5699.scala +run/t5717.scala +run/t5940.scala +run/t6028.scala +run/t6194.scala +run/t6669.scala +run/t6745-2.scala +run/t7096.scala +run/t7271.scala +run/t7337.scala +run/t7398.scala +run/t7569.scala +run/t7852.scala +run/t7817-tree-gen.scala +run/t7825.scala + +# partest.ParserTest +run/t3368.scala +run/t3368-b.scala +run/t3368-c.scala +run/t3368-d.scala +run/t9944.scala + +# partest.DirectTest +run/maxerrs.scala +run/t6288.scala +run/t6331.scala +run/t6440b.scala +run/t6555.scala +run/t7876.scala +run/typetags_without_scala_reflect_typetag_lookup.scala +run/dynamic-updateDynamic.scala +run/dynamic-selectDynamic.scala +run/dynamic-applyDynamic.scala +run/dynamic-applyDynamicNamed.scala +run/t4841-isolate-plugins +run/large_code.scala +run/macroPlugins-namerHooks.scala +run/t4841-no-plugin.scala +run/t4332.scala +run/t8029.scala +run/t8046 +run/t5905-features.scala +run/t5905b-features.scala +run/large_class.scala +run/t8708_b +run/icode-reader-dead-code.scala +run/t5938.scala +run/t8502.scala +run/t6502.scala +run/t8907.scala +run/t9097.scala +run/macroPlugins-enterStats.scala +run/sbt-icode-interface.scala +run/t8502b.scala +run/repl-paste-parse.scala +run/t5463.scala +run/t8433.scala +run/sd275.scala +run/sd275-java +run/t10471.scala +run/t6130.scala +run/t9437b.scala +run/t10552 +run/sd187.scala +run/patmat-origtp-switch.scala +run/indyLambdaKinds +run/indy-via-macro-class-constant-bsa 
+run/indy-via-macro-method-type-bsa +run/indy-via-macro-reflector +run/t11802-pluginsdir +run/t12019 + +# Using partest.SessionTest +run/t12354.scala + +# Using partest.StoreReporterDirectTest +run/t10171 + +# partest.StubErrorMessageTest +run/StubErrorBInheritsFromA.scala +run/StubErrorComplexInnerClass.scala +run/StubErrorHK.scala +run/StubErrorReturnTypeFunction.scala +run/StubErrorReturnTypeFunction2.scala +run/StubErrorReturnTypePolyFunction.scala +run/StubErrorSubclasses.scala +run/StubErrorTypeclass.scala +run/StubErrorTypeDef.scala + +# partest.CompilerTest +run/t8852a.scala +run/t12062.scala + +# partest.ASMConverters +run/t9403 + +# partest.BytecodeTest +run/t7106 +run/t7974 +run/t8601-closure-elim.scala +run/t4788 +run/t4788-separate-compilation + +# partest.SessionTest +run/t8843-repl-xlat.scala +run/t9206.scala +run/t9170.scala +run/t8918-unary-ids.scala +run/t1931.scala +run/t8935-class.scala +run/t8935-object.scala + +# partest.JavapTest +run/t8608-no-format.scala + +# Using .java source files + +run/t4317 +run/t4238 +run/t2296c +run/t4119 +run/t4283 +run/t4891 +run/t6168 +run/t6168b +run/t6240a +run/t6240b +run/t6548 +run/t6989 +run/t7008 +run/t7246 +run/t7246b +run/t7359 +run/t7439 +run/t7455 +run/t7510 +run/t7582-private-within +run/t7582 +run/t7582b +run/t3897 +run/t7374 +run/t3452e +run/t3452g +run/t3452d +run/t3452b +run/t3452a +run/t1430 +run/t4729 +run/t8442 +run/t8601e +run/t9298 +run/t9298b +run/t9359 +run/t7741a +run/t7741b +run/bcodeInlinerMixed +run/t9268 +run/t9489 +run/t9915 +run/t10059 +run/t1459 +run/t1459generic +run/t3236 +run/t9013 +run/t10231 +run/t10067 +run/t10249 +run/sd143 +run/t4283b +run/t7936 +run/t7936b +run/t9937 +run/t10368 +run/t10334b +run/sd304 +run/t10450 +run/t10042 +run/t10699 +run/t11109 +run/t9529 +run/t9529-types +run/t10490 +run/t10490-2 +run/t10889 +run/t3899 +run/t11373 +run/t8928 +run/indy-meth-refs-j + +# Using scala-script +run/t7791-script-linenums.scala + +# Using scalap +run/scalapInvokedynamic.scala + 
+# Using Manifests (which use Class.getInterfaces) +run/valueclasses-manifest-existential.scala +run/existentials3-old.scala +run/t2236-old.scala +run/interop_manifests_are_classtags.scala +run/valueclasses-manifest-generic.scala +run/valueclasses-manifest-basic.scala +run/t1195-old.scala +run/t3758-old.scala +run/t4110-old.scala +run/t6246.scala + +# Using ScalaRunTime.stringOf +run/value-class-extractor-seq.scala +run/t3493.scala + +# Custom invoke dynamic node +run/indy-via-macro +run/indy-via-macro-with-dynamic-args + +### Bugs +run/classtags_core.scala +run/classmanifests_new_core.scala +run/classmanifests_new_alias.scala + +## Compiler +run/anyval-box-types.scala +run/structural.scala +run/t266.scala +run/t8601b.scala +run/t8601d.scala +run/t10069b.scala + +## JVM compliance +run/try-catch-unify.scala +run/t2755.scala +run/java-erasure.scala + +## Fails +run/t5680.scala +run/t5914.scala + +## Build mode dependent +run/t6443.scala +run/t8888.scala +run/delambdafy-dependent-on-param-subst.scala +run/lisp.scala +run/number-parsing.scala + +## Check not passing +run/t4300.scala +run/t3361.scala +run/t8017 +run/t8334.scala +run/t8803.scala +run/t9697.scala +run/t10290.scala + +## Other +run/richs.scala \ No newline at end of file diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t11952b.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t11952b.check new file mode 100644 index 0000000000..a5211b1337 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t11952b.check @@ -0,0 +1,17 @@ +[running phase parser on t11952b.scala] +[running phase namer on t11952b.scala] +[running phase packageobjects on t11952b.scala] +[running phase typer on t11952b.scala] +[running phase nativeinterop on t11952b.scala] +[running phase patmat on t11952b.scala] +[running phase superaccessors on t11952b.scala] +[running phase extmethods on t11952b.scala] 
+[running phase pickler on t11952b.scala] +[running phase refchecks on t11952b.scala] +t11952b.scala:9: error: overriding method f in class C of type => String; + method f cannot override final member; + found : => scala.this.Int + required: => String + override def f: Int = 42 + ^ +one error found diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-additional.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-additional.check new file mode 100644 index 0000000000..8b89521070 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-additional.check @@ -0,0 +1,29 @@ + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + nativeinterop 5 prepare ASTs for Native interop + patmat 6 translate match expressions +superaccessors 7 add super accessors in traits and nested classes + extmethods 8 add extension methods for inline classes + pickler 9 serialize symbol tables + refchecks 10 reference/override checking, translate nested objects + uncurry 11 uncurry, translate function values to anonymous classes + fields 12 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 13 replace tail calls by jumps + specialize 14 @specialized-driven class and method specialization + explicitouter 15 this refs to outer pointers + erasure 16 erase types, add interfaces for traits + posterasure 17 clean up erased inline classes + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + nir 22 + cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM 
bytecode + ploogin 26 A sample phase that does so many things it's kind of hard... + terminal 27 the last phase during a compilation run diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-list.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-list.check new file mode 100644 index 0000000000..eba706333b --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-list.check @@ -0,0 +1,2 @@ +ploogin - A sample plugin for testing. +nir - Compile to Scala Native IR (NIR) diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-missing.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-missing.check new file mode 100644 index 0000000000..a82e833901 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-missing.check @@ -0,0 +1,29 @@ +Error: unable to load class: t6446.Ploogin + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + nativeinterop 5 prepare ASTs for Native interop + patmat 6 translate match expressions +superaccessors 7 add super accessors in traits and nested classes + extmethods 8 add extension methods for inline classes + pickler 9 serialize symbol tables + refchecks 10 reference/override checking, translate nested objects + uncurry 11 uncurry, translate function values to anonymous classes + fields 12 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 13 replace tail calls by jumps + specialize 14 @specialized-driven class and method specialization + explicitouter 15 this refs to outer pointers + erasure 16 erase types, add interfaces for traits 
+ posterasure 17 clean up erased inline classes + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + nir 22 + cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM bytecode + terminal 26 the last phase during a compilation run diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-show-phases.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-show-phases.check new file mode 100644 index 0000000000..5fe052ad3f --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t6446-show-phases.check @@ -0,0 +1,28 @@ + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + nativeinterop 5 prepare ASTs for Native interop + patmat 6 translate match expressions +superaccessors 7 add super accessors in traits and nested classes + extmethods 8 add extension methods for inline classes + pickler 9 serialize symbol tables + refchecks 10 reference/override checking, translate nested objects + uncurry 11 uncurry, translate function values to anonymous classes + fields 12 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 13 replace tail calls by jumps + specialize 14 @specialized-driven class and method specialization + explicitouter 15 this refs to outer pointers + erasure 16 erase types, add interfaces for traits + posterasure 17 clean up erased inline classes + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + nir 22 + 
cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM bytecode + terminal 26 the last phase during a compilation run diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t7494-no-options.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t7494-no-options.check new file mode 100644 index 0000000000..803585d330 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/neg/t7494-no-options.check @@ -0,0 +1,30 @@ +error: Error: ploogin takes no options + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + nativeinterop 5 prepare ASTs for Native interop + patmat 6 translate match expressions +superaccessors 7 add super accessors in traits and nested classes + extmethods 8 add extension methods for inline classes + pickler 9 serialize symbol tables + refchecks 10 reference/override checking, translate nested objects + uncurry 11 uncurry, translate function values to anonymous classes + fields 12 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 13 replace tail calls by jumps + specialize 14 @specialized-driven class and method specialization + explicitouter 15 this refs to outer pointers + erasure 16 erase types, add interfaces for traits + posterasure 17 clean up erased inline classes + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + nir 22 + cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM bytecode + ploogin 26 A sample phase that does so many things it's kind of 
hard... + terminal 27 the last phase during a compilation run diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/classof.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/classof.check new file mode 100644 index 0000000000..21bf4cfb41 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/classof.check @@ -0,0 +1,22 @@ +Value types: +class scala.scalanative.runtime.PrimitiveUnit +class scala.scalanative.runtime.PrimitiveBoolean +class scala.scalanative.runtime.PrimitiveByte +class scala.scalanative.runtime.PrimitiveShort +class scala.scalanative.runtime.PrimitiveChar +class scala.scalanative.runtime.PrimitiveInt +class scala.scalanative.runtime.PrimitiveLong +class scala.scalanative.runtime.PrimitiveFloat +class scala.scalanative.runtime.PrimitiveDouble +Class types +class SomeClass +class scala.collection.immutable.List +class scala.Tuple2 +Arrays: +class scala.scalanative.runtime.ObjectArray +class scala.scalanative.runtime.IntArray +class scala.scalanative.runtime.DoubleArray +class scala.scalanative.runtime.ObjectArray +Functions: +interface scala.Function2 +interface scala.Function1 diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/classtags_contextbound.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/classtags_contextbound.check new file mode 100644 index 0000000000..5d3106c9bc --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/classtags_contextbound.check @@ -0,0 +1 @@ +class scala.scalanative.runtime.IntArray diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/classtags_multi.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/classtags_multi.check new file mode 100644 index 0000000000..ab1c14e439 --- 
/dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/classtags_multi.check @@ -0,0 +1,5 @@ +Int +Array[scala.scalanative.runtime.PrimitiveInt] +Array[java.lang.Object] +Array[java.lang.Object] +Array[java.lang.Object] diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/getClassTest-valueClass.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/getClassTest-valueClass.check new file mode 100644 index 0000000000..cee2875fff --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/getClassTest-valueClass.check @@ -0,0 +1,2 @@ +class scala.scalanative.runtime.PrimitiveInt +class V diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/interop_classtags_are_classmanifests.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/interop_classtags_are_classmanifests.check new file mode 100644 index 0000000000..5ef5b7138c --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/interop_classtags_are_classmanifests.check @@ -0,0 +1,3 @@ +Int +java.lang.String +Array[scala.scalanative.runtime.PrimitiveInt] diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t4753.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t4753.check new file mode 100644 index 0000000000..9a020c1ead --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t4753.check @@ -0,0 +1 @@ +class scala.scalanative.runtime.PrimitiveBoolean diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t5568.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t5568.check new file mode 100644 index 0000000000..0018046644 
--- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t5568.check @@ -0,0 +1,9 @@ +class scala.scalanative.runtime.PrimitiveUnit +class scala.scalanative.runtime.PrimitiveInt +class scala.runtime.BoxedUnit +class scala.runtime.BoxedUnit +class java.lang.Integer +class java.lang.Integer +5 +5 +5 diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t5923b.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t5923b.check new file mode 100644 index 0000000000..a4885c883f --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t5923b.check @@ -0,0 +1,3 @@ +class scala.scalanative.runtime.ObjectArray +class scala.scalanative.runtime.ObjectArray +class scala.scalanative.runtime.ObjectArray diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t6318_primitives.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t6318_primitives.check new file mode 100644 index 0000000000..1b64e046c7 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.12.18/run/t6318_primitives.check @@ -0,0 +1,54 @@ +Checking if class scala.scalanative.runtime.PrimitiveByte matches class scala.scalanative.runtime.PrimitiveByte +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveByte matches class scala.scalanative.runtime.PrimitiveShort +None +Checking if class java.lang.Byte matches class scala.scalanative.runtime.PrimitiveByte +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveShort matches class scala.scalanative.runtime.PrimitiveShort +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveShort matches class scala.scalanative.runtime.PrimitiveChar +None +Checking if class java.lang.Short matches class scala.scalanative.runtime.PrimitiveShort +Some(1) +Checking if class 
scala.scalanative.runtime.PrimitiveChar matches class scala.scalanative.runtime.PrimitiveChar +Some() +Checking if class scala.scalanative.runtime.PrimitiveChar matches class scala.scalanative.runtime.PrimitiveInt +None +Checking if class java.lang.Character matches class scala.scalanative.runtime.PrimitiveChar +Some() +Checking if class scala.scalanative.runtime.PrimitiveInt matches class scala.scalanative.runtime.PrimitiveInt +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveInt matches class scala.scalanative.runtime.PrimitiveLong +None +Checking if class java.lang.Integer matches class scala.scalanative.runtime.PrimitiveInt +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveLong matches class scala.scalanative.runtime.PrimitiveLong +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveLong matches class scala.scalanative.runtime.PrimitiveFloat +None +Checking if class java.lang.Long matches class scala.scalanative.runtime.PrimitiveLong +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveFloat matches class scala.scalanative.runtime.PrimitiveFloat +Some(1.0) +Checking if class scala.scalanative.runtime.PrimitiveFloat matches class scala.scalanative.runtime.PrimitiveDouble +None +Checking if class java.lang.Float matches class scala.scalanative.runtime.PrimitiveFloat +Some(1.0) +Checking if class scala.scalanative.runtime.PrimitiveDouble matches class scala.scalanative.runtime.PrimitiveDouble +Some(1.0) +Checking if class scala.scalanative.runtime.PrimitiveDouble matches class scala.scalanative.runtime.PrimitiveBoolean +None +Checking if class java.lang.Double matches class scala.scalanative.runtime.PrimitiveDouble +Some(1.0) +Checking if class scala.scalanative.runtime.PrimitiveBoolean matches class scala.scalanative.runtime.PrimitiveBoolean +Some(true) +Checking if class scala.scalanative.runtime.PrimitiveBoolean matches class scala.scalanative.runtime.PrimitiveUnit +None +Checking if class java.lang.Boolean matches 
class scala.scalanative.runtime.PrimitiveBoolean +Some(true) +Checking if class scala.scalanative.runtime.PrimitiveUnit matches class scala.scalanative.runtime.PrimitiveUnit +Some(()) +Checking if class scala.scalanative.runtime.PrimitiveUnit matches class scala.scalanative.runtime.PrimitiveByte +None +Checking if class scala.scalanative.runtime.BoxedUnit$ matches class scala.scalanative.runtime.PrimitiveUnit +Some(()) diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/BlacklistedTests.txt b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/BlacklistedTests.txt new file mode 100644 index 0000000000..6b3ca95f30 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/BlacklistedTests.txt @@ -0,0 +1,1078 @@ +# Ported from Scala.js, might not be exhaustive enough (some blacklisted tests may actually work in SN) + +# +# POS +# + +# Spuriously fails too often, and causes other subsequent tests to fail too +# Note that this test, by design, stress-tests type checking +pos/t6367.scala + +# +# NEG +# + +# Does not create tasty.jar +neg/t12134 + +# +# RUN +# + +# Uses .java files +run/t12195 +run/t9200 +run/t8348 +run/noInlineUnknownIndy +run/specialize-functional-interface + +# Relies on the exact toString() representation of Floats/Doubles +run/t2378.scala + +# Using parts of the javalib we don't plan to support + +run/t5018.scala +run/t2417.scala +run/lazy-concurrent.scala +run/t3667.scala +run/t3038d.scala +run/shutdownhooks.scala +run/t5590.scala +run/t3895b.scala +run/t5974.scala +run/t5262.scala +run/serialize-stream.scala +run/lambda-serialization-gc.scala +run/t9390.scala +run/t9390b.scala +run/t9390c.scala +run/trait-defaults-super.scala +run/t2849.scala +run/t10488.scala +run/various-flat-classpath-types.scala + +# Uses j.l.Class stubs +run/t9437a.scala +run/t12002.scala +run/BoxUnboxTest.scala +run/module-serialization-proxy-class-unload.scala + +# 
Uses java.math.BigDecimal / BigInteger : but failures not due to them +run/is-valid-num.scala + +# Documented semantic difference on String.split(x: Array[Char]) +run/t0325.scala + +# Using Threads +run/inner-obj-auto.scala +run/predef-cycle.scala +run/synchronized.scala +run/sd409.scala + +# Uses java.security +run/t2318.scala + +# Tries to catch java.lang.StackOverflowError +run/t6154.scala + +# Taking too much time >60sec +run/t10594.scala +run/t3989.scala + +# Using IO + +run/t6488.scala +run/t6988.scala + +# Object{Output|Input}Streams +run/defaults-serizaliable-no-forwarders.scala +run/defaults-serizaliable-with-forwarders.scala +run/t6935.scala +run/t8188.scala +run/t9375.scala +run/t9365.scala +run/inlineAddDeserializeLambda.scala +run/sammy_seriazable.scala +run/lambda-serialization-security.scala +run/t10232.scala +run/t10233.scala +run/t10244.scala +run/t10522.scala +run/t11255 +run/transient-object.scala + +# Using System.getProperties + +run/t4426.scala + +# Using Await + +run/t7336.scala +run/t7775.scala +run/t10513.scala +run/future-flatmap-exec-count.scala + +# Using detailed stack trace + +run/t6308.scala + +# Using reflection + +run/reflection-package-name-conflict +run/sip23-toolbox-eval.scala +run/t6063 +run/t9644.scala +run/t12038a +run/t12038b + +run/mixin-bridge-methods.scala +run/t5125.scala +run/outertest.scala +run/t6223.scala +run/t5652b +run/elidable-opt.scala +run/nullable-lazyvals.scala +run/t4794.scala +run/t5652 +run/t5652c +run/getClassTest-old.scala +run/t8960.scala +run/t7965.scala +run/t8087.scala +run/t8931.scala +run/t8445.scala +run/lambda-serialization.scala + +run/reflection-repl-classes.scala +run/t5256e.scala +run/typetags_core.scala +run/reflection-constructormirror-toplevel-badpath.scala +run/t5276_1b.scala +run/reflection-sorted-decls.scala +run/toolbox_typecheck_implicitsdisabled.scala +run/t5418b.scala +run/toolbox_typecheck_macrosdisabled2.scala +run/abstypetags_serialize.scala +run/all-overridden.scala 
+run/showraw_tree_kinds.scala +run/showraw_tree_types_ids.scala +run/showraw_tree_types_typed.scala +run/showraw_tree_ids.scala +run/showraw_tree_ultimate.scala +run/t5266_2.scala +run/t5274_1.scala +run/t5224.scala +run/reflection-sanitychecks.scala +run/t6086-vanilla.scala +run/t5277_2.scala +run/reflection-methodsymbol-params.scala +run/reflection-valueclasses-standard.scala +run/t5274_2.scala +run/t5423.scala +run/reflection-modulemirror-toplevel-good.scala +run/t5419.scala +run/t5271_3.scala +run/reflection-enclosed-nested-basic.scala +run/reflection-enclosed-nested-nested-basic.scala +run/fail-non-value-types.scala +run/exprs_serialize.scala +run/t5258a.scala +run/typetags_without_scala_reflect_manifest_lookup.scala +run/t4110-new.scala +run/t5273_2b_newpatmat.scala +run/t6277.scala +run/t5335.scala +run/toolbox_typecheck_macrosdisabled.scala +run/reflection-modulemirror-inner-good.scala +run/t5229_2.scala +run/typetags_multi.scala +run/typetags_without_scala_reflect_typetag_manifest_interop.scala +run/reflection-constructormirror-toplevel-good.scala +run/reflection-magicsymbols-invoke.scala +run/t6392b.scala +run/t5229_1.scala +run/reflection-magicsymbols-vanilla.scala +run/t5225_2.scala +run/runtimeEval1.scala +run/reflection-enclosed-nested-inner-basic.scala +run/reflection-fieldmirror-ctorparam.scala +run/t6181.scala +run/reflection-magicsymbols-repl.scala +run/t5272_2_newpatmat.scala +run/t5270.scala +run/t5418a.scala +run/t5276_2b.scala +run/t5256f.scala +run/reflection-enclosed-basic.scala +run/reflection-constructormirror-inner-badpath.scala +run/interop_typetags_are_manifests.scala +run/newTags.scala +run/t5273_1_newpatmat.scala +run/reflection-constructormirror-nested-good.scala +run/t2236-new.scala +run/existentials3-new.scala +run/t6323b.scala +run/t5943a1.scala +run/reflection-fieldmirror-getsetval.scala +run/t5272_1_oldpatmat.scala +run/t5256h.scala +run/t1195-new.scala +run/t5840.scala +run/reflection-methodsymbol-returntype.scala 
+run/reflection-fieldmirror-accessorsareokay.scala +run/reflection-sorted-members.scala +run/reflection-allmirrors-tostring.scala +run/valueclasses-typetag-existential.scala +run/toolbox_console_reporter.scala +run/reflection-enclosed-inner-inner-basic.scala +run/t5256b.scala +run/bytecodecs.scala +run/elidable.scala +run/freetypes_false_alarm1.scala +run/freetypes_false_alarm2.scala +run/getClassTest-new.scala +run/idempotency-extractors.scala +run/idempotency-case-classes.scala +run/idempotency-this.scala +run/idempotency-labels.scala +run/idempotency-lazy-vals.scala +run/interop_manifests_are_abstypetags.scala +run/interop_manifests_are_typetags.scala +run/abstypetags_core.scala +run/macro-reify-abstypetag-notypeparams +run/macro-reify-abstypetag-typeparams-tags +run/macro-reify-abstypetag-typeparams-notags +run/macro-reify-abstypetag-usetypetag +run/macro-reify-freevars +run/macro-reify-splice-outside-reify +run/macro-reify-tagless-a +run/macro-reify-type +run/macro-reify-typetag-typeparams-tags +run/macro-reify-typetag-notypeparams +run/macro-undetparams-implicitval +run/manifests-new.scala +run/manifests-old.scala +run/no-pickle-skolems +run/position-val-def.scala +run/reflect-priv-ctor.scala +run/primitive-sigs-2-new.scala +run/primitive-sigs-2-old.scala +run/reflection-enclosed-inner-basic.scala +run/reflection-enclosed-inner-nested-basic.scala +run/reflection-constructormirror-inner-good.scala +run/reflection-constructormirror-nested-badpath.scala +run/reflection-fancy-java-classes +run/reflection-fieldsymbol-navigation.scala +run/reflection-fieldmirror-nmelocalsuffixstring.scala +run/reflection-fieldmirror-getsetvar.scala +run/reflection-fieldmirror-privatethis.scala +run/reflection-implicit.scala +run/reflection-mem-glbs.scala +run/reflection-mem-tags.scala +run/reflection-java-annotations +run/reflection-java-crtp +run/reflection-methodsymbol-typeparams.scala +run/reflection-modulemirror-nested-badpath.scala 
+run/reflection-modulemirror-inner-badpath.scala +run/reflection-modulemirror-nested-good.scala +run/reflection-modulemirror-toplevel-badpath.scala +run/reflection-sync-subtypes.scala +run/reflinit.scala +run/reflection-valueclasses-derived.scala +run/reflection-valueclasses-magic.scala +run/resetattrs-this.scala +run/runtimeEval2.scala +run/showraw_aliases.scala +run/showraw_mods.scala +run/shortClass.scala +run/showraw_nosymbol.scala +run/showraw_tree.scala +run/showraw_tree_types_untyped.scala +run/t1167.scala +run/t2577.scala +run/t2873.scala +run/t2886.scala +run/t3346j.scala +run/t3507-new.scala +run/t3569.scala +run/t5125b.scala +run/t5225_1.scala +run/t3425b +run/t5256a.scala +run/t5230.scala +run/t5256c.scala +run/t5256g.scala +run/t5266_1.scala +run/t5269.scala +run/t5271_1.scala +run/t5271_2.scala +run/t5271_4.scala +run/t5272_1_newpatmat.scala +run/t5272_2_oldpatmat.scala +run/t5273_1_oldpatmat.scala +run/t5273_2a_newpatmat.scala +run/t5273_2a_oldpatmat.scala +run/t5275.scala +run/t5276_1a.scala +run/t5276_2a.scala +run/t5277_1.scala +run/t5279.scala +run/t5334_1.scala +run/t5334_2.scala +run/t5415.scala +run/t5418.scala +run/t5704.scala +run/t5710-1.scala +run/t5710-2.scala +run/t5770.scala +run/t5894.scala +run/t5816.scala +run/t5824.scala +run/t5912.scala +run/t5942.scala +run/t5943a2.scala +run/t6023.scala +run/t6113.scala +run/t6175.scala +run/t6178.scala +run/t6199-mirror.scala +run/t6199-toolbox.scala +run/t6240-universe-code-gen.scala +run/t6221 +run/t6260b.scala +run/t6259.scala +run/t6287.scala +run/t6344.scala +run/t6392a.scala +run/t6591_1.scala +run/t6591_2.scala +run/t6591_3.scala +run/t6591_5.scala +run/t6591_6.scala +run/t6591_7.scala +run/t6608.scala +run/t6677.scala +run/t6687.scala +run/t6715.scala +run/t6719.scala +run/t6793.scala +run/t6860.scala +run/t6793b.scala +run/t6793c.scala +run/t7045.scala +run/t7046.scala +run/t7008-scala-defined +run/t7120b.scala +run/t7151.scala +run/t7214.scala +run/t7235.scala +run/t7331a.scala 
+run/t7331b.scala +run/t7331c.scala +run/t7558.scala +run/t7556 +run/t7779.scala +run/t7868b.scala +run/toolbox_current_run_compiles.scala +run/toolbox_default_reporter_is_silent.scala +run/toolbox_parse_package.scala +run/toolbox_silent_reporter.scala +run/toolbox_typecheck_inferimplicitvalue.scala +run/typetags_serialize.scala +run/valueclasses-typetag-basic.scala +run/WeakHashSetTest.scala +run/valueclasses-typetag-generic.scala +run/t4023.scala +run/t4024.scala +run/t6380.scala +run/t5273_2b_oldpatmat.scala +run/t8104 +run/t8047.scala +run/t6992 +run/var-arity-class-symbol.scala +run/typetags_symbolof_x.scala +run/typecheck +run/t8190.scala +run/t8192 +run/t8177f.scala +run/t7932.scala +run/t7700.scala +run/t7570c.scala +run/t7570b.scala +run/t7533.scala +run/t7570a.scala +run/t7044 +run/t7328.scala +run/t6733.scala +run/t6554.scala +run/t6732.scala +run/t6379 +run/t6411b.scala +run/t6411a.scala +run/t6260c.scala +run/t6260-delambdafy.scala +run/showdecl +run/reflection-sync-potpourri.scala +run/reflection-tags.scala +run/reflection-companiontype.scala +run/reflection-scala-annotations.scala +run/reflection-idtc.scala +run/macro-reify-nested-b2 +run/mixin-signatures.scala +run/reflection-companion.scala +run/macro-reify-nested-b1 +run/macro-reify-nested-a2 +run/macro-reify-nested-a1 +run/macro-reify-chained2 +run/macro-reify-chained1 +run/inferred-type-constructors.scala +run/mirror_symbolof_x.scala +run/t8196.scala +run/t8549b.scala +run/t8574.scala +run/t8637.scala +run/t6622.scala +run/toolbox_expand_macro.scala +run/toolbox-varargs +run/t9252.scala +run/t9182.scala +run/t9102.scala +run/t720.scala +run/t9408.scala +run/t10527.scala +run/trait-default-specialize.scala +run/lazy-locals-2.scala +run/t5294.scala +run/trait_fields_final.scala +run/trait_fields_bytecode.scala +run/trait_fields_volatile.scala +run/junitForwarders +run/reflect-java-param-names + +run/reify_ann2b.scala +run/reify_classfileann_a +run/reify_classfileann_b +run/reify_newimpl_29.scala 
+run/reify_magicsymbols.scala +run/reify_inheritance.scala +run/reify_newimpl_12.scala +run/reify_typerefs_2b.scala +run/reify_csv.scala +run/reify_inner2.scala +run/reify_maps_oldpatmat.scala +run/reify_newimpl_43.scala +run/reify_nested_inner_refers_to_local.scala +run/reify_closure7.scala +run/reify_closure8b.scala +run/reify_typerefs_3b.scala +run/reify_newimpl_44.scala +run/reify_newimpl_06.scala +run/reify_newimpl_05.scala +run/reify_newimpl_20.scala +run/reify_newimpl_23.scala +run/reify_metalevel_breach_-1_refers_to_1.scala +run/reify_newimpl_41.scala +run/reify-repl-fail-gracefully.scala +run/reify_fors_oldpatmat.scala +run/reify_inner3.scala +run/reify_closure8a.scala +run/reify_closures10.scala +run/reify_ann2a.scala +run/reify_newimpl_51.scala +run/reify_newimpl_47.scala +run/reify_extendbuiltins.scala +run/reify_newimpl_30.scala +run/reify_newimpl_38.scala +run/reify_closure2a.scala +run/reify_newimpl_45.scala +run/reify_closure1.scala +run/reify_generic2.scala +run/reify_printf.scala +run/reify_closure6.scala +run/reify_newimpl_37.scala +run/reify_newimpl_35.scala +run/reify_typerefs_3a.scala +run/reify_newimpl_25.scala +run/reify_ann4.scala +run/reify_typerefs_1b.scala +run/reify_newimpl_22.scala +run/reify_this.scala +run/reify_typerefs_2a.scala +run/reify_newimpl_03.scala +run/reify_newimpl_48.scala +run/reify_varargs.scala +run/reify_newimpl_42.scala +run/reify_newimpl_15.scala +run/reify_nested_inner_refers_to_global.scala +run/reify_newimpl_02.scala +run/reify_newimpl_01.scala +run/reify_fors_newpatmat.scala +run/reify_nested_outer_refers_to_local.scala +run/reify_newimpl_13.scala +run/reify_closure5a.scala +run/reify_inner4.scala +run/reify_sort.scala +run/reify_ann1a.scala +run/reify_closure4a.scala +run/reify_newimpl_33.scala +run/reify_sort1.scala +run/reify_properties.scala +run/reify_generic.scala +run/reify_newimpl_27.scala +run/reify-aliases.scala +run/reify_ann3.scala +run/reify-staticXXX.scala +run/reify_ann1b.scala 
+run/reify_ann5.scala +run/reify_anonymous.scala +run/reify-each-node-type.scala +run/reify_copypaste2.scala +run/reify_closure3a.scala +run/reify_copypaste1.scala +run/reify_complex.scala +run/reify_for1.scala +run/reify_getter.scala +run/reify_implicits-new.scala +run/reify_inner1.scala +run/reify_implicits-old.scala +run/reify_lazyunit.scala +run/reify_lazyevaluation.scala +run/reify_maps_newpatmat.scala +run/reify_metalevel_breach_+0_refers_to_1.scala +run/reify_metalevel_breach_-1_refers_to_0_a.scala +run/reify_metalevel_breach_-1_refers_to_0_b.scala +run/reify_nested_outer_refers_to_global.scala +run/reify_newimpl_04.scala +run/reify_newimpl_14.scala +run/reify_newimpl_11.scala +run/reify_newimpl_18.scala +run/reify_newimpl_19.scala +run/reify_newimpl_31.scala +run/reify_newimpl_21.scala +run/reify_newimpl_36.scala +run/reify_newimpl_39.scala +run/reify_newimpl_40.scala +run/reify_newimpl_49.scala +run/reify_newimpl_50.scala +run/reify_newimpl_52.scala +run/reify_renamed_term_basic.scala +run/reify_renamed_term_local_to_reifee.scala +run/reify_renamed_term_overloaded_method.scala +run/reify_renamed_type_basic.scala +run/reify_renamed_type_local_to_reifee.scala +run/reify_renamed_type_spliceable.scala +run/reify_typerefs_1a.scala +run/reify_timeofday.scala +run/reify_renamed_term_t5841.scala + +run/t7521b.scala +run/t8575b.scala +run/t8575c.scala +run/t8944c.scala +run/t9535.scala +run/t9814.scala +run/t10009.scala +run/t10075.scala +run/t10075b + +run/t8756.scala +run/inferred-type-constructors-hou.scala +run/trait-static-forwarder +run/SD-235.scala +run/t10026.scala +run/checkinit.scala +run/reflection-clinit +run/reflection-clinit-nested +run/t10487.scala + +run/typetags_caching.scala +run/type-tag-leak.scala +run/t10856.scala +run/module-static.scala + +# Uses reflection indirectly through +# scala.runtime.ScalaRunTime.replStringOf +run/t6634.scala + +# Using reflection to invoke macros. 
These tests actually don't require +# or test reflection, but use it to separate compilation units nicely. +# It's a pity we cannot use them + +run/macro-abort-fresh +run/macro-expand-varargs-explicit-over-nonvarargs-bad +run/macro-invalidret-doesnt-conform-to-def-rettype +run/macro-invalidret-nontypeable +run/macro-invalidusage-badret +run/macro-invalidusage-partialapplication +run/macro-invalidusage-partialapplication-with-tparams +run/macro-reflective-ma-normal-mdmi +run/macro-reflective-mamd-normal-mi + +# Using macros, but indirectly creating calls to reflection +run/macro-reify-unreify + +# Using Enumeration in a way we cannot fix + +run/enums.scala +run/t3719.scala +run/t8611b.scala + +# Expecting exceptions that are linking errors in Scala.js (e.g. NoSuchMethodException) +run/t10334.scala + +# Playing with classfile format + +run/classfile-format-51.scala +run/classfile-format-52.scala + +# Concurrent collections (TrieMap) +# has too much stuff implemented in *.java, so no support +run/triemap-hash.scala + +# Using Swing + +run/t3613.scala + +# Using the REPL + +run/repl-type.scala +run/repl-replay.scala +run/repl-errors.scala +run/repl-any-error.scala +run/repl-paste-error.scala +run/repl-previous-result.scala +run/repl-trace-elided-more.scala +run/t4285.scala +run/constant-type.scala +run/repl-bare-expr.scala +run/repl-parens.scala +run/repl-assign.scala +run/t5583.scala +run/treePrint.scala +run/constrained-types.scala +run/repl-power.scala +run/t4710.scala +run/repl-paste.scala +run/repl-reset.scala +run/repl-paste-3.scala +run/t6329_repl.scala +run/t6273.scala +run/repl-paste-2.scala +run/t5655.scala +run/t5072.scala +run/repl-colon-type.scala +run/repl-trim-stack-trace.scala +run/t4594-repl-settings.scala +run/repl-save.scala +run/repl-paste-raw.scala +run/repl-paste-4.scala +run/t7801.scala +run/repl-backticks.scala +run/t6633.scala +run/repl-inline.scala +run/repl-class-based-term-macros.scala +run/repl-always-use-instance.scala 
+run/repl-class-based-implicit-import.scala +run/repl-class-based-value-class.scala +run/repl-deadlock.scala +run/repl-class-based-outer-pointers.scala +run/repl-class-based-escaping-reads.scala + +# Using the Repl (scala.tools.partest.ReplTest) +run/t11991.scala +run/t11915.scala +run/t11899.scala +run/t11897.scala +run/t11838.scala +run/t11402.scala +run/t11064.scala +run/t10768.scala +run/class-symbol-contravariant.scala +run/macro-bundle-repl.scala +run/macro-repl-basic.scala +run/macro-repl-dontexpand.scala +run/macro-system-properties.scala +run/reflection-equality.scala +run/reflection-repl-elementary.scala +run/reify_newimpl_26.scala +run/repl-out-dir.scala +run/repl-term-macros.scala +run/repl-transcript.scala +run/repl-type-verbose.scala +run/t3376.scala +run/t4025.scala +run/t4172.scala +run/t4216.scala +run/t4542.scala +run/t4671.scala +run/t5256d.scala +run/t5535.scala +run/t5537.scala +run/t5789.scala +run/t6086-repl.scala +run/t6146b.scala +run/t6187.scala +run/t6320.scala +run/t6381.scala +run/t6434.scala +run/t6439.scala +run/t6507.scala +run/t6549.scala +run/t6937.scala +run/t7185.scala +run/t7319.scala +run/t7482a.scala +run/t7634.scala +run/t7747-repl.scala +run/t7805-repl-i.scala +run/tpeCache-tyconCache.scala +run/repl-empty-package +run/repl-javap-def.scala +run/repl-javap-mem.scala +run/repl-javap-outdir +run/repl-javap.scala +run/t6329_repl_bug.scala +run/t4950.scala +run/xMigration.scala +run/t6541-option.scala +run/repl-serialization.scala +run/t9174.scala +run/repl-paste-5.scala +run/repl-no-uescape.scala +run/repl-no-imports-no-predef-classbased.scala +run/repl-implicits-nopredef.scala +run/repl-classbased.scala +run/repl-no-imports-no-predef-power.scala +run/repl-paste-b.scala +run/repl-paste-6.scala +run/repl-implicits.scala +run/repl-no-imports-no-predef.scala +run/repl-paste-raw-b.scala +run/repl-paste-raw-c.scala +run/t9749-repl-dot.scala +run/trait_fields_repl.scala +run/t7139 +run/t9689 +run/trailing-commas.scala +run/t4700.scala 
+run/t9880-9881.scala +run/repl-kind.scala +run/t10284.scala +run/t9016.scala +run/repl-completions.scala +run/t10956.scala +run/t11564.scala +run/invalid-lubs.scala +run/constAnnArgs.scala +run/interpolation-repl.scala +run/t12292.scala +run/t12276.scala +run/t10943.scala + +# Using Scala Script (partest.ScriptTest) + +run/t7711-script-args.scala +run/t4625.scala +run/t4625c.scala +run/t4625b.scala + +# Using the compiler API + +run/nowarn.scala +run/t9944.scala +run/t3368.scala +run/t3368-b.scala +run/t2512.scala +run/analyzerPlugins.scala +run/compiler-asSeenFrom.scala +run/t5603.scala +run/t6440.scala +run/t5545.scala +run/existentials-in-compiler.scala +run/global-showdef.scala +run/stream_length.scala +run/annotatedRetyping.scala +run/imain.scala +run/existential-rangepos.scala +run/delambdafy_uncurry_byname_inline.scala +run/delambdafy_uncurry_byname_method.scala +run/delambdafy_uncurry_inline.scala +run/delambdafy_t6555.scala +run/delambdafy_uncurry_method.scala +run/delambdafy_t6028.scala +run/memberpos.scala +run/programmatic-main.scala +run/reflection-names.scala +run/settings-parse.scala +run/sm-interpolator.scala +run/t1501.scala +run/t1500.scala +run/t1618.scala +run/t2464 +run/t4072.scala +run/t5064.scala +run/t5385.scala +run/t5699.scala +run/t5717.scala +run/t5940.scala +run/t6028.scala +run/t6194.scala +run/t6669.scala +run/t6745-2.scala +run/t7096.scala +run/t7271.scala +run/t7337.scala +run/t7569.scala +run/t7852.scala +run/t7817-tree-gen.scala +run/extend-global.scala +run/t12062.scala + + +# partest.DirectTest +run/t12019 +run/t11815.scala +run/t11746.scala +run/t11731.scala +run/t11385.scala +run/t10819.scala +run/t10751.scala +run/t10641.scala +run/t10344.scala +run/t10203.scala +run/string-switch-pos.scala +run/patmat-seq.scala +run/maxerrs.scala +run/t6288.scala +run/t6331.scala +run/t6440b.scala +run/t6555.scala +run/t7876.scala +run/typetags_without_scala_reflect_typetag_lookup.scala +run/dynamic-updateDynamic.scala 
+run/dynamic-selectDynamic.scala +run/dynamic-applyDynamic.scala +run/dynamic-applyDynamicNamed.scala +run/t4841-isolate-plugins +run/large_code.scala +run/macroPlugins-namerHooks.scala +run/t4841-no-plugin.scala +run/t8029.scala +run/t8046 +run/t5905-features.scala +run/t5905b-features.scala +run/large_class.scala +run/t8708_b +run/icode-reader-dead-code.scala +run/t5938.scala +run/t8502.scala +run/t6502.scala +run/t8907.scala +run/t9097.scala +run/macroPlugins-enterStats.scala +run/sbt-icode-interface.scala +run/t8502b.scala +run/repl-paste-parse.scala +run/t5463.scala +run/t8433.scala +run/sd275.scala +run/sd275-java +run/t10471.scala +run/t6130.scala +run/t9437b.scala +run/t10552 +run/sd187.scala +run/patmat-origtp-switch.scala +run/indyLambdaKinds +run/t11802-pluginsdir +run/literals-parsing.scala +run/patmat-no-inline-isEmpty.scala +run/patmat-no-inline-unapply.scala +run/splain-tree.scala +run/splain-truncrefined.scala +run/splain.scala + +# Using partest.StoreReporterDirectTest +run/t10171 + +# partest.StubErrorMessageTest +run/StubErrorBInheritsFromA.scala +run/StubErrorComplexInnerClass.scala +run/StubErrorHK.scala +run/StubErrorReturnTypeFunction.scala +run/StubErrorReturnTypeFunction2.scala +run/StubErrorReturnTypePolyFunction.scala +run/StubErrorSubclasses.scala +run/StubErrorTypeclass.scala +run/StubErrorTypeDef.scala + +# partest.ASMConverters +run/t9403 + +# partest.BytecodeTest +run/t7106 +run/t7974 +run/t8601-closure-elim.scala +run/t4788 +run/t4788-separate-compilation + +# partest.SessionTest +run/t8843-repl-xlat.scala +run/t9206.scala +run/t9170.scala +run/t8918-unary-ids.scala +run/t1931.scala +run/t8935-class.scala +run/t8935-object.scala + +# partest.JavapTest +run/t8608-no-format.scala + +# Using .java source files + +run/t4317 +run/t4238 +run/t2296c +run/t4119 +run/t4283 +run/t4891 +run/t6168 +run/t6168b +run/t6240a +run/t6240b +run/t6548 +run/t6989 +run/t7008 +run/t7246 +run/t7246b +run/t7359 +run/t7439 +run/t7455 +run/t7510 
+run/t7582-private-within +run/t7582 +run/t7582b +run/t3897 +run/t7374 +run/t3452e +run/t3452g +run/t3452d +run/t3452b +run/t3452a +run/t1430 +run/t4729 +run/t8442 +run/t8601e +run/t9298 +run/t9298b +run/t9359 +run/t7741a +run/t7741b +run/bcodeInlinerMixed +run/t9268 +run/t9489 +run/t9915 +run/t10059 +run/t1459 +run/t1459generic +run/t3236 +run/t9013 +run/t10231 +run/t10067 +run/t10249 +run/sd143 +run/t4283b +run/t7936 +run/t7936b +run/t9937 +run/t10368 +run/t10334b +run/sd304 +run/t10450 +run/t10042 +run/t10699 +run/t9529 +run/t9529-types +run/t10490 +run/t10490-2 +run/t10889 +run/t3899 +run/t11373 +run/t8928 + + +# Using partest.Properties (nest.Runner) +run/t4294.scala +run/tailcalls.scala + +# Using scala-script +run/t7791-script-linenums.scala + +# Using scalap +run/scalapInvokedynamic.scala + +# Using Manifests (which use Class.getInterfaces) +run/valueclasses-manifest-existential.scala +run/existentials3-old.scala +run/t2236-old.scala +run/interop_manifests_are_classtags.scala +run/valueclasses-manifest-generic.scala +run/valueclasses-manifest-basic.scala +run/t1195-old.scala +run/t3758-old.scala +run/t4110-old.scala +run/t6246.scala + + +# Using ScalaRunTime.stringOf +run/value-class-extractor-seq.scala +run/t3493.scala + +# Custom invoke dynamic node +run/indy-via-macro +run/indy-via-macro-with-dynamic-args + +### Bugs +## Compiler +run/anyval-box-types.scala +run/structural.scala +run/t8017 +run/t8601b.scala +run/t8601d.scala +run/t10069b.scala + +## JVM compliance +run/t5680.scala +run/try-catch-unify.scala +run/t2755.scala +run/java-erasure.scala + + +## Fails +run/t10290.scala +run/t6827.scala +run/classtags-cached.scala +run/sip23-cast-1.scala + +#OutOfMemoryError +run/stream-gc.scala + +## Check not passing +run/t266.scala +run/t4300.scala +run/t8334.scala +run/t8803.scala +run/t9697.scala + +#Missing symbols +run/t9400.scala + +## LLVM compilation fails +run/t7269.scala + +## Other +run/t10277.scala +run/t10277b.scala + +run/t12380 +run/t7448.scala 
diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t11952b.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t11952b.check new file mode 100644 index 0000000000..6043da6279 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t11952b.check @@ -0,0 +1,16 @@ +[running phase parser on t11952b.scala] +[running phase namer on t11952b.scala] +[running phase packageobjects on t11952b.scala] +[running phase typer on t11952b.scala] +[running phase nativeinterop on t11952b.scala] +[running phase superaccessors on t11952b.scala] +[running phase extmethods on t11952b.scala] +[running phase pickler on t11952b.scala] +[running phase refchecks on t11952b.scala] +t11952b.scala:9: error: cannot override final member: + final def f: String (defined in class C); + found : scala.this.Int + required: String + override def f: Int = 42 + ^ +1 error diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-additional.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-additional.check new file mode 100644 index 0000000000..173702fd11 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-additional.check @@ -0,0 +1,29 @@ + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + nativeinterop 5 prepare ASTs for Native interop +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects + patmat 10 translate match expressions + uncurry 11 uncurry, 
translate function values to anonymous classes + fields 12 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 13 replace tail calls by jumps + specialize 14 @specialized-driven class and method specialization + explicitouter 15 this refs to outer pointers + erasure 16 erase types, add interfaces for traits + posterasure 17 clean up erased inline classes + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + nir 22 + cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM bytecode + ploogin 26 A sample phase that does so many things it's kind of hard... + terminal 27 the last phase during a compilation run diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-list.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-list.check new file mode 100644 index 0000000000..eba706333b --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-list.check @@ -0,0 +1,2 @@ +ploogin - A sample plugin for testing. 
+nir - Compile to Scala Native IR (NIR) diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-missing.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-missing.check new file mode 100644 index 0000000000..c348d55c19 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-missing.check @@ -0,0 +1,29 @@ +Error: unable to load class: t6446.Ploogin + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + nativeinterop 5 prepare ASTs for Native interop +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects + patmat 10 translate match expressions + uncurry 11 uncurry, translate function values to anonymous classes + fields 12 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 13 replace tail calls by jumps + specialize 14 @specialized-driven class and method specialization + explicitouter 15 this refs to outer pointers + erasure 16 erase types, add interfaces for traits + posterasure 17 clean up erased inline classes + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + nir 22 + cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM bytecode + terminal 26 the last phase during a compilation run diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-show-phases.check 
b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-show-phases.check new file mode 100644 index 0000000000..244dbec464 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t6446-show-phases.check @@ -0,0 +1,28 @@ + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + nativeinterop 5 prepare ASTs for Native interop +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects + patmat 10 translate match expressions + uncurry 11 uncurry, translate function values to anonymous classes + fields 12 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 13 replace tail calls by jumps + specialize 14 @specialized-driven class and method specialization + explicitouter 15 this refs to outer pointers + erasure 16 erase types, add interfaces for traits + posterasure 17 clean up erased inline classes + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + nir 22 + cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM bytecode + terminal 26 the last phase during a compilation run diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t7494-no-options.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t7494-no-options.check new file mode 100644 index 0000000000..d5c68d8139 --- /dev/null +++ 
b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/neg/t7494-no-options.check @@ -0,0 +1,30 @@ +error: Error: ploogin takes no options + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + nativeinterop 5 prepare ASTs for Native interop +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects + patmat 10 translate match expressions + uncurry 11 uncurry, translate function values to anonymous classes + fields 12 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 13 replace tail calls by jumps + specialize 14 @specialized-driven class and method specialization + explicitouter 15 this refs to outer pointers + erasure 16 erase types, add interfaces for traits + posterasure 17 clean up erased inline classes + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + nir 22 + cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM bytecode + ploogin 26 A sample phase that does so many things it's kind of hard... 
+ terminal 27 the last phase during a compilation run diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/classof.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/classof.check new file mode 100644 index 0000000000..21bf4cfb41 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/classof.check @@ -0,0 +1,22 @@ +Value types: +class scala.scalanative.runtime.PrimitiveUnit +class scala.scalanative.runtime.PrimitiveBoolean +class scala.scalanative.runtime.PrimitiveByte +class scala.scalanative.runtime.PrimitiveShort +class scala.scalanative.runtime.PrimitiveChar +class scala.scalanative.runtime.PrimitiveInt +class scala.scalanative.runtime.PrimitiveLong +class scala.scalanative.runtime.PrimitiveFloat +class scala.scalanative.runtime.PrimitiveDouble +Class types +class SomeClass +class scala.collection.immutable.List +class scala.Tuple2 +Arrays: +class scala.scalanative.runtime.ObjectArray +class scala.scalanative.runtime.IntArray +class scala.scalanative.runtime.DoubleArray +class scala.scalanative.runtime.ObjectArray +Functions: +interface scala.Function2 +interface scala.Function1 diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/classtags_contextbound.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/classtags_contextbound.check new file mode 100644 index 0000000000..5d3106c9bc --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/classtags_contextbound.check @@ -0,0 +1 @@ +class scala.scalanative.runtime.IntArray diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/classtags_multi.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/classtags_multi.check new file mode 100644 index 0000000000..ab1c14e439 --- /dev/null 
+++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/classtags_multi.check @@ -0,0 +1,5 @@ +Int +Array[scala.scalanative.runtime.PrimitiveInt] +Array[java.lang.Object] +Array[java.lang.Object] +Array[java.lang.Object] diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/getClassTest-valueClass.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/getClassTest-valueClass.check new file mode 100644 index 0000000000..cee2875fff --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/getClassTest-valueClass.check @@ -0,0 +1,2 @@ +class scala.scalanative.runtime.PrimitiveInt +class V diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/interop_classtags_are_classmanifests.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/interop_classtags_are_classmanifests.check new file mode 100644 index 0000000000..5ef5b7138c --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/interop_classtags_are_classmanifests.check @@ -0,0 +1,3 @@ +Int +java.lang.String +Array[scala.scalanative.runtime.PrimitiveInt] diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t4753.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t4753.check new file mode 100644 index 0000000000..9a020c1ead --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t4753.check @@ -0,0 +1 @@ +class scala.scalanative.runtime.PrimitiveBoolean diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t5568.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t5568.check new file mode 100644 index 0000000000..0018046644 --- 
/dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t5568.check @@ -0,0 +1,9 @@ +class scala.scalanative.runtime.PrimitiveUnit +class scala.scalanative.runtime.PrimitiveInt +class scala.runtime.BoxedUnit +class scala.runtime.BoxedUnit +class java.lang.Integer +class java.lang.Integer +5 +5 +5 diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t5923b.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t5923b.check new file mode 100644 index 0000000000..a4885c883f --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t5923b.check @@ -0,0 +1,3 @@ +class scala.scalanative.runtime.ObjectArray +class scala.scalanative.runtime.ObjectArray +class scala.scalanative.runtime.ObjectArray diff --git a/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t6318_primitives.check b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t6318_primitives.check new file mode 100644 index 0000000000..1b64e046c7 --- /dev/null +++ b/scala-partest-tests/src/test/resources/scala/tools/partest/scalanative/2.13.11/run/t6318_primitives.check @@ -0,0 +1,54 @@ +Checking if class scala.scalanative.runtime.PrimitiveByte matches class scala.scalanative.runtime.PrimitiveByte +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveByte matches class scala.scalanative.runtime.PrimitiveShort +None +Checking if class java.lang.Byte matches class scala.scalanative.runtime.PrimitiveByte +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveShort matches class scala.scalanative.runtime.PrimitiveShort +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveShort matches class scala.scalanative.runtime.PrimitiveChar +None +Checking if class java.lang.Short matches class scala.scalanative.runtime.PrimitiveShort +Some(1) +Checking if class 
scala.scalanative.runtime.PrimitiveChar matches class scala.scalanative.runtime.PrimitiveChar +Some() +Checking if class scala.scalanative.runtime.PrimitiveChar matches class scala.scalanative.runtime.PrimitiveInt +None +Checking if class java.lang.Character matches class scala.scalanative.runtime.PrimitiveChar +Some() +Checking if class scala.scalanative.runtime.PrimitiveInt matches class scala.scalanative.runtime.PrimitiveInt +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveInt matches class scala.scalanative.runtime.PrimitiveLong +None +Checking if class java.lang.Integer matches class scala.scalanative.runtime.PrimitiveInt +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveLong matches class scala.scalanative.runtime.PrimitiveLong +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveLong matches class scala.scalanative.runtime.PrimitiveFloat +None +Checking if class java.lang.Long matches class scala.scalanative.runtime.PrimitiveLong +Some(1) +Checking if class scala.scalanative.runtime.PrimitiveFloat matches class scala.scalanative.runtime.PrimitiveFloat +Some(1.0) +Checking if class scala.scalanative.runtime.PrimitiveFloat matches class scala.scalanative.runtime.PrimitiveDouble +None +Checking if class java.lang.Float matches class scala.scalanative.runtime.PrimitiveFloat +Some(1.0) +Checking if class scala.scalanative.runtime.PrimitiveDouble matches class scala.scalanative.runtime.PrimitiveDouble +Some(1.0) +Checking if class scala.scalanative.runtime.PrimitiveDouble matches class scala.scalanative.runtime.PrimitiveBoolean +None +Checking if class java.lang.Double matches class scala.scalanative.runtime.PrimitiveDouble +Some(1.0) +Checking if class scala.scalanative.runtime.PrimitiveBoolean matches class scala.scalanative.runtime.PrimitiveBoolean +Some(true) +Checking if class scala.scalanative.runtime.PrimitiveBoolean matches class scala.scalanative.runtime.PrimitiveUnit +None +Checking if class java.lang.Boolean matches 
class scala.scalanative.runtime.PrimitiveBoolean +Some(true) +Checking if class scala.scalanative.runtime.PrimitiveUnit matches class scala.scalanative.runtime.PrimitiveUnit +Some(()) +Checking if class scala.scalanative.runtime.PrimitiveUnit matches class scala.scalanative.runtime.PrimitiveByte +None +Checking if class scala.scalanative.runtime.BoxedUnit$ matches class scala.scalanative.runtime.PrimitiveUnit +Some(()) diff --git a/scalalib/overrides-2.12/scala/reflect/Manifest.scala.patch b/scalalib/overrides-2.12/scala/reflect/Manifest.scala.patch index f24b831954..dc6c283524 100644 --- a/scalalib/overrides-2.12/scala/reflect/Manifest.scala.patch +++ b/scalalib/overrides-2.12/scala/reflect/Manifest.scala.patch @@ -1,6 +1,11 @@ ---- 2.12.15/scala/reflect/Manifest.scala +--- 2.12.17/scala/reflect/Manifest.scala +++ overrides-2.12/scala/reflect/Manifest.scala -@@ -76,8 +76,8 @@ +@@ -1,3 +1,4 @@ ++ + /* + * Scala (https://www.scala-lang.org) + * +@@ -76,8 +77,8 @@ case _ => false } override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] @@ -11,136 +16,20 @@ } /** `ManifestFactory` defines factory methods for manifests. 
-@@ -105,7 +105,7 @@ - } - private def readResolve(): Any = Manifest.Byte +@@ -241,9 +242,9 @@ } -- val Byte: AnyValManifest[Byte] = new ByteManifest -+ @inline def Byte: AnyValManifest[Byte] = new ByteManifest + val Unit: AnyValManifest[Unit] = new UnitManifest - @SerialVersionUID(1L) - private class ShortManifest extends AnyValManifest[scala.Short]("Short") { -@@ -121,7 +121,7 @@ - } - private def readResolve(): Any = Manifest.Short - } -- val Short: AnyValManifest[Short] = new ShortManifest -+ @inline def Short: AnyValManifest[Short] = new ShortManifest - - @SerialVersionUID(1L) - private class CharManifest extends AnyValManifest[scala.Char]("Char") { -@@ -137,7 +137,7 @@ - } - private def readResolve(): Any = Manifest.Char - } -- val Char: AnyValManifest[Char] = new CharManifest -+ @inline def Char: AnyValManifest[Char] = new CharManifest - - @SerialVersionUID(1L) - private class IntManifest extends AnyValManifest[scala.Int]("Int") { -@@ -153,7 +153,7 @@ - } - private def readResolve(): Any = Manifest.Int - } -- val Int: AnyValManifest[Int] = new IntManifest -+ @inline def Int: AnyValManifest[Int] = new IntManifest - - @SerialVersionUID(1L) - private class LongManifest extends AnyValManifest[scala.Long]("Long") { -@@ -169,7 +169,7 @@ - } - private def readResolve(): Any = Manifest.Long - } -- val Long: AnyValManifest[Long] = new LongManifest -+ @inline def Long: AnyValManifest[Long] = new LongManifest - - @SerialVersionUID(1L) - private class FloatManifest extends AnyValManifest[scala.Float]("Float") { -@@ -185,7 +185,7 @@ - } - private def readResolve(): Any = Manifest.Float - } -- val Float: AnyValManifest[Float] = new FloatManifest -+ @inline def Float: AnyValManifest[Float] = new FloatManifest - - @SerialVersionUID(1L) - private class DoubleManifest extends AnyValManifest[scala.Double]("Double") { -@@ -204,7 +204,7 @@ - } - private def readResolve(): Any = Manifest.Double - } -- val Double: AnyValManifest[Double] = new DoubleManifest -+ @inline def Double: 
AnyValManifest[Double] = new DoubleManifest - - @SerialVersionUID(1L) - private class BooleanManifest extends AnyValManifest[scala.Boolean]("Boolean") { -@@ -220,7 +220,7 @@ - } - private def readResolve(): Any = Manifest.Boolean - } -- val Boolean: AnyValManifest[Boolean] = new BooleanManifest -+ @inline def Boolean: AnyValManifest[Boolean] = new BooleanManifest - - @SerialVersionUID(1L) - private class UnitManifest extends AnyValManifest[scala.Unit]("Unit") { -@@ -239,7 +239,7 @@ - } - private def readResolve(): Any = Manifest.Unit - } -- val Unit: AnyValManifest[Unit] = new UnitManifest -+ @inline def Unit: AnyValManifest[Unit] = new UnitManifest - - private val ObjectTYPE = classOf[java.lang.Object] - private val NothingTYPE = classOf[scala.runtime.Nothing$] -@@ -251,7 +251,7 @@ - override def <:<(that: ClassManifest[_]): Boolean = (that eq this) - private def readResolve(): Any = Manifest.Any - } -- val Any: Manifest[scala.Any] = new AnyManifest -+ @inline def Any: Manifest[scala.Any] = new AnyManifest - - @SerialVersionUID(1L) - private class ObjectManifest extends PhantomManifest[java.lang.Object](ObjectTYPE, "Object") { -@@ -259,9 +259,9 @@ - override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) - private def readResolve(): Any = Manifest.Object - } -- val Object: Manifest[java.lang.Object] = new ObjectManifest -+ @inline def Object: Manifest[java.lang.Object] = new ObjectManifest - -- val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]] -+ @inline def AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]] - - @SerialVersionUID(1L) - private class AnyValPhantomManifest extends PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") { -@@ -269,7 +269,7 @@ - override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) - private def readResolve(): Any = Manifest.AnyVal - } -- val AnyVal: Manifest[scala.AnyVal] = new AnyValPhantomManifest -+ @inline def AnyVal: 
Manifest[scala.AnyVal] = new AnyValPhantomManifest - - @SerialVersionUID(1L) - private class NullManifest extends PhantomManifest[scala.Null](NullTYPE, "Null") { -@@ -278,7 +278,7 @@ - (that ne null) && (that ne Nothing) && !(that <:< AnyVal) - private def readResolve(): Any = Manifest.Null - } -- val Null: Manifest[scala.Null] = new NullManifest -+ @inline def Null: Manifest[scala.Null] = new NullManifest - - @SerialVersionUID(1L) - private class NothingManifest extends PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") { -@@ -286,7 +286,7 @@ - override def <:<(that: ClassManifest[_]): Boolean = (that ne null) - private def readResolve(): Any = Manifest.Nothing - } -- val Nothing: Manifest[scala.Nothing] = new NothingManifest -+ @inline def Nothing: Manifest[scala.Nothing] = new NothingManifest +- private val ObjectTYPE = classOf[java.lang.Object] +- private val NothingTYPE = classOf[scala.runtime.Nothing$] +- private val NullTYPE = classOf[scala.runtime.Null$] ++ @inline private def ObjectTYPE = classOf[java.lang.Object] ++ @inline private def NothingTYPE = classOf[scala.runtime.Nothing$] ++ @inline private def NullTYPE = classOf[scala.runtime.Null$] @SerialVersionUID(1L) - private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] { -@@ -323,8 +323,8 @@ + private class AnyManifest extends PhantomManifest[scala.Any](ObjectTYPE, "Any") { +@@ -323,8 +324,8 @@ private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_], override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) { override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] diff --git a/scalalib/overrides-3.2.2/scala/runtime/LazyVals.scala.patch b/scalalib/overrides-3.2.2/scala/runtime/LazyVals.scala.patch new file mode 100644 index 0000000000..e3f4dcf534 --- /dev/null +++ b/scalalib/overrides-3.2.2/scala/runtime/LazyVals.scala.patch @@ -0,0 +1,158 @@ +--- 3.2.2/scala/runtime/LazyVals.scala ++++ 
overrides-3/scala/runtime/LazyVals.scala +@@ -4,42 +4,13 @@ + + import scala.annotation.* + ++import scala.scalanative.runtime.* ++ + /** + * Helper methods used in thread-safe lazy vals. + */ + object LazyVals { +- @nowarn +- private[this] val unsafe: sun.misc.Unsafe = +- classOf[sun.misc.Unsafe].getDeclaredFields.nn.find { field => +- field.nn.getType == classOf[sun.misc.Unsafe] && { +- field.nn.setAccessible(true) +- true +- } +- } +- .map(_.nn.get(null).asInstanceOf[sun.misc.Unsafe]) +- .getOrElse { +- throw new ExceptionInInitializerError { +- new IllegalStateException("Can't find instance of sun.misc.Unsafe") +- } +- } +- +- private[this] val base: Int = { +- val processors = java.lang.Runtime.getRuntime.nn.availableProcessors() +- 8 * processors * processors +- } +- +- private[this] val monitors: Array[Object] = +- Array.tabulate(base)(_ => new Object) +- +- private def getMonitor(obj: Object, fieldId: Int = 0) = { +- var id = (java.lang.System.identityHashCode(obj) + fieldId) % base +- +- if (id < 0) id += base +- monitors(id) +- } +- + private final val LAZY_VAL_MASK = 3L +- private final val debug = false + + /* ------------- Start of public API ------------- */ + +@@ -71,96 +42,49 @@ + + def STATE(cur: Long, ord: Int): Long = { + val r = (cur >> (ord * BITS_PER_LAZY_VAL)) & LAZY_VAL_MASK +- if (debug) +- println(s"STATE($cur, $ord) = $r") + r + } + + def CAS(t: Object, offset: Long, e: Long, v: Int, ord: Int): Boolean = { +- if (debug) +- println(s"CAS($t, $offset, $e, $v, $ord)") +- val mask = ~(LAZY_VAL_MASK << ord * BITS_PER_LAZY_VAL) +- val n = (e & mask) | (v.toLong << (ord * BITS_PER_LAZY_VAL)) +- unsafe.compareAndSwapLong(t, offset, e, n) ++ unexpectedUsage() + } + + @experimental + def objCAS(t: Object, offset: Long, exp: Object, n: Object): Boolean = { +- if (debug) +- println(s"objCAS($t, $exp, $n)") +- unsafe.compareAndSwapObject(t, offset, exp, n) ++ unexpectedUsage() + } + + def setFlag(t: Object, offset: Long, v: Int, ord: Int): Unit = { +- 
if (debug) +- println(s"setFlag($t, $offset, $v, $ord)") +- var retry = true +- while (retry) { +- val cur = get(t, offset) +- if (STATE(cur, ord) == 1) retry = !CAS(t, offset, cur, v, ord) +- else { +- // cur == 2, somebody is waiting on monitor +- if (CAS(t, offset, cur, v, ord)) { +- val monitor = getMonitor(t, ord) +- monitor.synchronized { +- monitor.notifyAll() +- } +- retry = false +- } +- } +- } ++ unexpectedUsage() + } + + def wait4Notification(t: Object, offset: Long, cur: Long, ord: Int): Unit = { +- if (debug) +- println(s"wait4Notification($t, $offset, $cur, $ord)") +- var retry = true +- while (retry) { +- val cur = get(t, offset) +- val state = STATE(cur, ord) +- if (state == 1) CAS(t, offset, cur, 2, ord) +- else if (state == 2) { +- val monitor = getMonitor(t, ord) +- monitor.synchronized { +- if (STATE(get(t, offset), ord) == 2) // make sure notification did not happen yet. +- monitor.wait() +- } +- } +- else retry = false +- } ++ unexpectedUsage() + } + + def get(t: Object, off: Long): Long = { +- if (debug) +- println(s"get($t, $off)") +- unsafe.getLongVolatile(t, off) ++ unexpectedUsage() + } + + // kept for backward compatibility + def getOffset(clz: Class[_], name: String): Long = { +- @nowarn +- val r = unsafe.objectFieldOffset(clz.getDeclaredField(name)) +- if (debug) +- println(s"getOffset($clz, $name) = $r") +- r ++ unexpectedUsage() + } + + @experimental + def getStaticFieldOffset(field: java.lang.reflect.Field): Long = { +- @nowarn +- val r = unsafe.staticFieldOffset(field) +- if (debug) +- println(s"getStaticFieldOffset(${field.getDeclaringClass}, ${field.getName}) = $r") +- r ++ unexpectedUsage() + } + + def getOffsetStatic(field: java.lang.reflect.Field) = +- @nowarn +- val r = unsafe.objectFieldOffset(field) +- if (debug) +- println(s"getOffset(${field.getDeclaringClass}, ${field.getName}) = $r") +- r ++ unexpectedUsage() + ++ private def unexpectedUsage() = { ++ throw new IllegalStateException( ++ "Unexpected usage of 
scala.runtime.LazyVals method, " + ++ "in Scala Native lazy vals use overriden version of this class" ++ ) ++ } + + object Names { + final val state = "STATE" diff --git a/scalalib/overrides-3/scala/runtime/LazyVals.scala.patch b/scalalib/overrides-3/scala/runtime/LazyVals.scala.patch index e3f4dcf534..a6c24ca33c 100644 --- a/scalalib/overrides-3/scala/runtime/LazyVals.scala.patch +++ b/scalalib/overrides-3/scala/runtime/LazyVals.scala.patch @@ -1,6 +1,6 @@ ---- 3.2.2/scala/runtime/LazyVals.scala +--- 3.3.0-RC4/scala/runtime/LazyVals.scala +++ overrides-3/scala/runtime/LazyVals.scala -@@ -4,42 +4,13 @@ +@@ -4,44 +4,13 @@ import scala.annotation.* @@ -11,19 +11,21 @@ */ object LazyVals { - @nowarn -- private[this] val unsafe: sun.misc.Unsafe = -- classOf[sun.misc.Unsafe].getDeclaredFields.nn.find { field => -- field.nn.getType == classOf[sun.misc.Unsafe] && { -- field.nn.setAccessible(true) -- true -- } -- } -- .map(_.nn.get(null).asInstanceOf[sun.misc.Unsafe]) -- .getOrElse { -- throw new ExceptionInInitializerError { -- new IllegalStateException("Can't find instance of sun.misc.Unsafe") -- } -- } +- private[this] val unsafe: sun.misc.Unsafe = { +- def throwInitializationException() = +- throw new ExceptionInInitializerError( +- new IllegalStateException("Can't find instance of sun.misc.Unsafe") +- ) +- try +- val unsafeField = classOf[sun.misc.Unsafe].getDeclaredField("theUnsafe").nn +- if unsafeField.getType == classOf[sun.misc.Unsafe] then +- unsafeField.setAccessible(true) +- unsafeField.get(null).asInstanceOf[sun.misc.Unsafe] +- else +- throwInitializationException() +- catch case _: NoSuchFieldException => +- throwInitializationException() +- } - - private[this] val base: Int = { - val processors = java.lang.Runtime.getRuntime.nn.availableProcessors() @@ -45,7 +47,7 @@ /* ------------- Start of public API ------------- */ -@@ -71,96 +42,49 @@ +@@ -70,94 +39,47 @@ def STATE(cur: Long, ord: Int): Long = { val r = (cur >> (ord * BITS_PER_LAZY_VAL)) & 
LAZY_VAL_MASK @@ -63,7 +65,6 @@ + unexpectedUsage() } - @experimental def objCAS(t: Object, offset: Long, exp: Object, n: Object): Boolean = { - if (debug) - println(s"objCAS($t, $exp, $n)") @@ -129,7 +130,6 @@ + unexpectedUsage() } - @experimental def getStaticFieldOffset(field: java.lang.reflect.Field): Long = { - @nowarn - val r = unsafe.staticFieldOffset(field) diff --git a/scripts/scalalib-patch-tool.sc b/scripts/scalalib-patch-tool.sc index 245945b6cc..01b6488aab 100644 --- a/scripts/scalalib-patch-tool.sc +++ b/scripts/scalalib-patch-tool.sc @@ -1,6 +1,11 @@ -import $ivy.`com.lihaoyi::ammonite-ops:2.3.8`, ammonite.ops._, mainargs._ -import $ivy.`io.github.java-diff-utils:java-diff-utils:4.9`, -com.github.difflib.{DiffUtils, UnifiedDiffUtils} +//> using dep "io.github.java-diff-utils:java-diff-utils:4.12" +//> using dep "com.lihaoyi::os-lib:0.9.1" +//> using dep "com.lihaoyi::mainargs:0.4.0" + +import com.github.difflib.{DiffUtils, UnifiedDiffUtils} +import os._ +import mainargs._ + import scala.util._ val ignoredFiles = { @@ -23,29 +28,29 @@ def main( doc = "Path to directory containing overrides, defaults to scalalib/overrides-$scalaBinaryVersion" ) - overridesDir: Option[os.Path] = None + overridesDir: Option[String] = None ) = { val Array(vMajor, vMinor, vPatch) = scalaVersion.split('.') implicit val wd: os.Path = pwd - val sourcesDir = pwd / 'scalalib / 'target / 'scalaSources / scalaVersion - val overridesDirPath = { - overridesDir - .orElse { + val sourcesDir = pwd / "scalalib" / "target" / "scalaSources" / scalaVersion + val overridesDirPath: os.Path = + overridesDir.map(os.Path(_)).getOrElse { + { val overridesDir = s"overrides" val scalaEpochDir = s"$overridesDir-$vMajor" val binaryVersionDir = s"$scalaEpochDir.$vMinor" val scalaVersionDir = s"$binaryVersionDir.$vPatch" List(scalaVersionDir, binaryVersionDir, scalaEpochDir, overridesDir) - .map(pwd / 'scalalib / _) + .map(pwd / "scalalib" / _) .find(exists(_)) } - .getOrElse( - sys.error("Not found 
any existing default scalalib override dir") - ) - } + .getOrElse( + sys.error("Not found any existing default scalalib override dir") + ) + } println(s""" |Attempting to $cmd with config: @@ -56,7 +61,7 @@ def main( | - ${ignoredFiles.mkString("\n - ")} |""".stripMargin) - assert(exists ! overridesDirPath, "Overrides dir does not exists") + assert(os.exists(overridesDirPath), "Overrides dir does not exist") cmd match { // Create patches based on fetched Scala sources and it's overrideds @@ -64,12 +69,14 @@ sourcesExistsOrFetch(scalaVersion, sourcesDir) for { - overridePath <- ls.rec ! overridesDirPath |? (_.ext == "scala") + overridePath <- os + .walk(overridesDirPath) + .filterNot(p => p.ext != "scala" || os.isDir(p)) relativePath = overridePath relativeTo overridesDirPath if !ignoredFiles.contains(relativePath) - sourcePath = sourcesDir / relativePath if exists ! sourcePath + sourcePath = sourcesDir / relativePath if os.exists(sourcePath) patchPath = overridePath / up / s"${overridePath.last}.patch" - _ = if (exists ! patchPath) rm ! patchPath + _ = if (os.exists(patchPath)) os.remove(patchPath) } { val originalLines = fileToLines(sourcePath) val diff = DiffUtils.diff( @@ -107,13 +114,15 @@ def main( sourcesExistsOrFetch(scalaVersion, sourcesDir) for { - patchPath <- ls.rec ! overridesDirPath |? (_.ext == "patch") + patchPath <- os + .walk(overridesDirPath) + .filterNot(p => p.ext != "patch" || os.isDir(p)) overridePath = patchPath / up / patchPath.last.stripSuffix(".patch") relativePath = overridePath relativeTo overridesDirPath if !ignoredFiles.contains(relativePath) sourcePath = sourcesDir / relativePath - _ = if (exists(overridePath)) rm !
overridePath + _ = if (exists(overridePath)) os.remove(overridePath) } { // There is no JVM library working with diffs which can apply fuzzy @@ -126,13 +135,13 @@ def main( copyAttributes = true ) try { - %%( + os.proc( "git", "apply", "--whitespace=fix", "--recount", patchPath - )(sourcesDir) + ) call (cwd = sourcesDir) os.move(sourcePath, overridePath, replaceExisting = true) os.move(sourceCopyPath, sourcePath) println(s"Recreated $overridePath") @@ -147,7 +156,11 @@ def main( // Walk overrides dir and remove all `.scala` sources which has defined `.scala.patch` sibling case PruneOverrides => for { - patchPath <- ls.rec ! overridesDirPath |? (_.ext == "patch") + patchPath <- os.walk( + overridesDirPath, + skip = _.ext != "patch", + includeTarget = false + ) overridePath = patchPath / up / patchPath.last.stripSuffix(".patch") relativePath = overridePath relativeTo overridesDirPath @@ -155,7 +168,7 @@ def main( !ignoredFiles.contains(relativePath) } { if (shallPrune) { - rm ! overridePath + os.remove(overridePath) } } } @@ -188,7 +201,9 @@ def sourcesExistsOrFetch(scalaVersion: String, sourcesDir: os.Path)(implicit ) = { if (!exists(sourcesDir)) { println(s"Fetching Scala $scalaVersion sources") - %("sbt", s"++ $scalaVersion", "scalalib/fetchScalaSource") + os.proc("sbt", s"++ $scalaVersion", "scalalib/fetchScalaSource").call() } - assert(exists ! 
sourcesDir, s"Sources at $sourcesDir missing") + assert(os.exists(sourcesDir), s"Sources at $sourcesDir missing") } + +ParserForMethods(this).runOrThrow(args, allowPositional = true) diff --git a/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/package.scala b/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/package.scala new file mode 100644 index 0000000000..929839b2de --- /dev/null +++ b/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/package.scala @@ -0,0 +1,18 @@ +package scala.scalanative + +import scala.scalanative.build._ + +package object benchmarks { + lazy val defaultNativeConfig = NativeConfig.empty + .withClang(Discover.clang()) + .withClangPP(Discover.clangpp()) + .withCompileOptions(Discover.compileOptions()) + .withLinkingOptions(Discover.linkingOptions()) + + lazy val defaultConfig = Config.empty + .withClassPath(BuildInfo.fullTestSuiteClasspath.map(_.toPath)) + .withLogger(Logger.nullLogger) + .withCompilerConfig(defaultNativeConfig) + + val TestMain = "scala.scalanative.testinterface.TestMain" +} diff --git a/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/testinterface/CodeGenBench.scala b/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/testinterface/CodeGenBench.scala new file mode 100644 index 0000000000..a69275e10f --- /dev/null +++ b/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/testinterface/CodeGenBench.scala @@ -0,0 +1,60 @@ +package scala.scalanative +package benchmarks +package testinterface + +import java.nio.file.{Path, Files} +import java.util.Comparator +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.annotations.Mode._ + +import scala.scalanative.build._ + +@Fork(1) +@State(Scope.Benchmark) +@BenchmarkMode(Array(AverageTime)) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 5) +@Measurement(iterations = 10) +abstract class CodeGenBench(nativeConfig: NativeConfig => NativeConfig) { + var 
config: Config = _ + var linked: linker.Result = _ + + @Setup(Level.Trial) + def setup(): Unit = { + val workdir = Files.createTempDirectory("codegen-bench") + config = defaultConfig + .withWorkdir(workdir) + .withMainClass(TestMain) + .withCompilerConfig(nativeConfig) + Files.createDirectories(config.workdir) + + val entries = build.core.ScalaNative.entries(config) + util.Scope { implicit scope => + linked = core.ScalaNative.link(config, entries) + } + } + + @TearDown(Level.Trial) + def cleanup(): Unit = { + val workdir = config.workdir + Files + .walk(workdir) + .sorted(Comparator.reverseOrder()) + .forEach(Files.delete) + linked = null + config = null + } + + @Benchmark + def codeGen(): Unit = { + val paths = core.ScalaNative.codegen(config, linked) + assert(paths.nonEmpty) + } +} + +class CodeGen + extends CodeGenBench( + nativeConfig = _.withIncrementalCompilation(false) + ) diff --git a/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/testinterface/LinkerBench.scala b/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/testinterface/LinkerBench.scala new file mode 100644 index 0000000000..c80b560d9c --- /dev/null +++ b/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/testinterface/LinkerBench.scala @@ -0,0 +1,44 @@ +package scala.scalanative +package benchmarks + +import java.nio.file.{Path, Files} +import java.util.Comparator +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.annotations.Mode._ + +@Fork(1) +@State(Scope.Benchmark) +@BenchmarkMode(Array(AverageTime)) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS) +@Measurement(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS) +class LinkerBench { + var workdir: Path = _ + + @Setup(Level.Iteration) + def setup(): Unit = { + workdir = Files.createTempDirectory("linker-bench") + } + + @TearDown(Level.Iteration) + def cleanup(): Unit = { + Files + 
.walk(workdir) + .sorted(Comparator.reverseOrder()) + .forEach(Files.delete) + workdir = null + } + + @Benchmark + def link(): Unit = util.Scope { implicit scope => + val config = defaultConfig + .withWorkdir(workdir) + .withMainClass(TestMain) + + val entries = build.core.ScalaNative.entries(config) + val linked = build.core.ScalaNative.link(config, entries) + assert(linked.unavailable.size == 0) + } +} diff --git a/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/testinterface/OptimizerBench.scala b/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/testinterface/OptimizerBench.scala new file mode 100644 index 0000000000..37acbfdf70 --- /dev/null +++ b/tools-benchmarks/src/main/scala/scala/scalanative/benchmarks/testinterface/OptimizerBench.scala @@ -0,0 +1,58 @@ +package scala.scalanative +package benchmarks + +import java.nio.file.{Path, Files} +import java.util.Comparator +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.annotations.Mode._ + +import scala.scalanative.build._ + +@Fork(1) +@State(Scope.Benchmark) +@BenchmarkMode(Array(AverageTime)) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 5) +@Measurement(iterations = 10) +abstract class OptimizerBench(mode: build.Mode) { + var config: Config = _ + var linked: linker.Result = _ + + @Setup(Level.Trial) + def setup(): Unit = { + val workdir = Files.createTempDirectory("optimize-bench") + config = defaultConfig + .withWorkdir(workdir) + .withMainClass(TestMain) + .withCompilerConfig(_.withMode(mode)) + + val entries = core.ScalaNative.entries(config) + util.Scope { implicit scope => + linked = core.ScalaNative.link(config, entries) + } + } + + @TearDown(Level.Trial) + def cleanup(): Unit = { + val workdir = config.workdir + Files + .walk(workdir) + .sorted(Comparator.reverseOrder()) + .forEach(Files.delete) + linked = null + config = null + } + + @Benchmark + def optimize(): Unit = { + val optimized = 
core.ScalaNative.optimize(config, linked) + assert(optimized.unavailable.size == 0) + } +} + +class OptimizeDebug extends OptimizerBench(build.Mode.debug) +class OptimizeReleaseFast extends OptimizerBench(build.Mode.releaseFast) +// Commented out because of long build times ~13 min +// class OptimizeReleaseFull extends OptimizerBench(build.Mode.releaseFull) diff --git a/tools/src/main/scala/scala/scalanative/build/Build.scala b/tools/src/main/scala/scala/scalanative/build/Build.scala index 292377d2ea..27d34c45bd 100644 --- a/tools/src/main/scala/scala/scalanative/build/Build.scala +++ b/tools/src/main/scala/scala/scalanative/build/Build.scala @@ -65,6 +65,8 @@ object Build { config.withClassPath(fclasspath) } + config.logger.debug(config.toString()) + // find and link val linked = { val entries = ScalaNative.entries(fconfig) diff --git a/tools/src/main/scala/scala/scalanative/build/GC.scala b/tools/src/main/scala/scala/scalanative/build/GC.scala index cafecdb4b2..8e7ecdf968 100644 --- a/tools/src/main/scala/scala/scalanative/build/GC.scala +++ b/tools/src/main/scala/scala/scalanative/build/GC.scala @@ -30,7 +30,7 @@ object GC { private[scalanative] case object None extends GC("none", Seq.empty, Seq("shared")) private[scalanative] case object Boehm - extends GC("boehm", Seq("gc"), Seq.empty) + extends GC("boehm", Seq("gc"), Seq("shared")) private[scalanative] case object Immix extends GC("immix", Seq.empty, Seq("shared", "immix_commix")) private[scalanative] case object Commix diff --git a/tools/src/main/scala/scala/scalanative/build/NativeConfig.scala b/tools/src/main/scala/scala/scalanative/build/NativeConfig.scala index f2e162c2d8..91430fcdd5 100644 --- a/tools/src/main/scala/scala/scalanative/build/NativeConfig.scala +++ b/tools/src/main/scala/scala/scalanative/build/NativeConfig.scala @@ -65,6 +65,11 @@ sealed trait NativeConfig { */ def embedResources: Boolean + private[scalanative] lazy val configuredOrDetectedTriple =
TargetTriple.parse(targetTriple.getOrElse(Discover.targetTriple(this))) + + // update methods - order as properties above + /** Create a new config with given garbage collector. */ def withGC(value: GC): NativeConfig diff --git a/tools/src/main/scala/scala/scalanative/build/TargetTriple.scala b/tools/src/main/scala/scala/scalanative/build/TargetTriple.scala new file mode 100644 index 0000000000..7c4bd4f83b --- /dev/null +++ b/tools/src/main/scala/scala/scalanative/build/TargetTriple.scala @@ -0,0 +1,453 @@ +// ported from LLVM 887d6ab dated 2023-04-16 + +//===--- Triple.cpp - Target triple helper class --------------------------===// +// +// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. +// See https://llvm.org/LICENSE.txt for license information. +// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +// +//===----------------------------------------------------------------------===// + +package scala.scalanative +package build + +import java.nio.ByteOrder + +private[scalanative] final case class TargetTriple( + arch: String, + vendor: String, + os: String, + env: String +) { + override def toString = s"$arch-$vendor-$os-$env" +} + +private[scalanative] object TargetTriple { + + def parse(triple: String): TargetTriple = { + val components = triple.split("-", 4).toList + val unknown = "unknown" + TargetTriple( + components.lift(0).map(Arch.parse).getOrElse(unknown), + components.lift(1).map(Vendor.parse).getOrElse(unknown), + components.lift(2).map(OS.parse).getOrElse(unknown), + components.lift(3).map(Env.parse).getOrElse(unknown) + ) + } + + object Arch { + def parse(str: String): String = str match { + case "i386" | "i486" | "i586" | "i686" => x86 + case "i786" | "i886" | "i986" => x86 + case "amd64" | "x86_64" | "x86_64h" => x86_64 + case "powerpc" | "powerpcspe" | "ppc" | "ppc32" => ppc + case "powerpcle" | "ppcle" | "ppc32le" => ppcle + case "powerpc64" | "ppu" | "ppc64" => ppc64 + case "powerpc64le" | "ppc64le" => ppc64le 
+ case "xscale" => arm + case "xscaleeb" => armeb + case "aarch64" => aarch64 + case "aarch64_be" => aarch64_be + case "aarch64_32" => aarch64_32 + case "arc" => arc + case "arm64" => aarch64 + case "arm64_32" => aarch64_32 + case "arm64e" => aarch64 + case "arm64ec" => aarch64 + case "arm" => arm + case "armeb" => armeb + case "thumb" => thumb + case "thumbeb" => thumbeb + case "avr" => avr + case "m68k" => m68k + case "msp430" => msp430 + case "mips" | "mipseb" | "mipsallegrex" | "mipsisa32r6" | "mipsr6" => mips + case "mipsel" | "mipsallegrexel" | "mipsisa32r6el" | "mipsr6el" => + mipsel + case "mips64" | "mips64eb" | "mipsn32" | "mipsisa64r6" | "mips64r6" | + "mipsn32r6" => + mips64 + case "mips64el" | "mipsn32el" | "mipsisa64r6el" | "mips64r6el" | + "mipsn32r6el" => + mips64el + case "r600" => r600 + case "amdgcn" => amdgcn + case "riscv32" => riscv32 + case "riscv64" => riscv64 + case "hexagon" => hexagon + case "s390x" | "systemz" => systemz + case "sparc" => sparc + case "sparcel" => sparcel + case "sparcv9" | "sparc64" => sparcv9 + case "tce" => tce + case "tcele" => tcele + case "xcore" => xcore + case "nvptx" => nvptx + case "nvptx64" => nvptx64 + case "le32" => le32 + case "le64" => le64 + case "amdil" => amdil + case "amdil64" => amdil64 + case "hsail" => hsail + case "hsail64" => hsail64 + case "spir" => spir + case "spir64" => spir64 + case "spirv32" | "spirv32v1.0" | "spirv32v1.1" | "spirv32v1.2" | + "spirv32v1.3" | "spirv32v1.4" | "spirv32v1.5" => + spirv32 + case "spirv64" | "spirv64v1.0" | "spirv64v1.1" | "spirv64v1.2" | + "spirv64v1.3" | "spirv64v1.4" | "spirv64v1.5" => + spirv64 + case "lanai" => lanai + case "renderscript32" => renderscript32 + case "renderscript64" => renderscript64 + case "shave" => shave + case "ve" => ve + case "wasm32" => wasm32 + case "wasm64" => wasm64 + case "csky" => csky + case "loongarch32" => loongarch32 + case "loongarch64" => loongarch64 + case "dxil" => dxil + case "xtensa" => xtensa + case other => + // Some 
architectures require special parsing logic just to compute the + // ArchType result. + + if (other.startsWith("kalimba")) + kalimba + else if (other.startsWith("arm") || other.startsWith("thumb") || + other.startsWith("aarch64")) + parseArm(other) + else if (other.startsWith("bpf")) + parseBpf(other) + else + unknown + } + + private def parseArm(str: String): String = { + + val isa = + if (str.startsWith("aarch64") || str.startsWith("arm64")) aarch64 + else if (str.startsWith("thumb")) thumb + else if (str.startsWith("arm")) arm + else unknown + + val endian = + if (str.startsWith("armeb") || str.startsWith("thumbeb") || + str.startsWith("aarch64_be")) + Some(ByteOrder.BIG_ENDIAN) + else if (str.startsWith("arm") || str.startsWith("thumb")) { + if (str.endsWith("eb")) + Some(ByteOrder.BIG_ENDIAN) + else + Some(ByteOrder.LITTLE_ENDIAN) + } else if (str.startsWith("aarch64") || str.startsWith("aarch64_32")) + Some(ByteOrder.LITTLE_ENDIAN) + else None + + endian match { + case Some(ByteOrder.LITTLE_ENDIAN) => + isa match { + case `arm` => arm + case `thumb` => thumb + case `aarch64` => aarch64 + case _ => unknown + } + case Some(ByteOrder.BIG_ENDIAN) => + isa match { + case `arm` => armeb + case `thumb` => thumbeb + case `aarch64` => aarch64_be + case _ => unknown + } + case _ => unknown + } + } + + private def parseBpf(str: String): String = str match { + case "bpf" => + if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) + bpfel + else bpfeb + case "bpf_be" | "bpfeb" => bpfeb + case "bpf_le" | "bpfel" => bpfel + case _ => unknown + } + + final val unknown = "unknown" + final val aarch64 = "aarch64" + final val aarch64_32 = "aarch64_32" + final val aarch64_be = "aarch64_be" + final val amdgcn = "amdgcn" + final val amdil64 = "amdil64" + final val amdil = "amdil" + final val arc = "arc" + final val arm = "arm" + final val armeb = "armeb" + final val avr = "avr" + final val bpfeb = "bpfeb" + final val bpfel = "bpfel" + final val csky = "csky" + final val dxil = 
"dxil" + final val hexagon = "hexagon" + final val hsail64 = "hsail64" + final val hsail = "hsail" + final val kalimba = "kalimba" + final val lanai = "lanai" + final val le32 = "le32" + final val le64 = "le64" + final val loongarch32 = "loongarch32" + final val loongarch64 = "loongarch64" + final val m68k = "m68k" + final val mips64 = "mips64" + final val mips64el = "mips64el" + final val mips = "mips" + final val mipsel = "mipsel" + final val msp430 = "msp430" + final val nvptx64 = "nvptx64" + final val nvptx = "nvptx" + final val ppc64 = "powerpc64" + final val ppc64le = "powerpc64le" + final val ppc = "powerpc" + final val ppcle = "powerpcle" + final val r600 = "r600" + final val renderscript32 = "renderscript32" + final val renderscript64 = "renderscript64" + final val riscv32 = "riscv32" + final val riscv64 = "riscv64" + final val shave = "shave" + final val sparc = "sparc" + final val sparcel = "sparcel" + final val sparcv9 = "sparcv9" + final val spir64 = "spir64" + final val spir = "spir" + final val spirv32 = "spirv32" + final val spirv64 = "spirv64" + final val systemz = "s390x" + final val tce = "tce" + final val tcele = "tcele" + final val thumb = "thumb" + final val thumbeb = "thumbeb" + final val ve = "ve" + final val wasm32 = "wasm32" + final val wasm64 = "wasm64" + final val x86 = "i386" + final val x86_64 = "x86_64" + final val xcore = "xcore" + final val xtensa = "xtensa" + } + + object Vendor { + def parse(str: String): String = str match { + case "apple" => Apple + case "pc" => PC + case "scei" => SCEI + case "sie" => SCEI + case "fsl" => Freescale + case "ibm" => IBM + case "img" => ImaginationTechnologies + case "mti" => MipsTechnologies + case "nvidia" => NVIDIA + case "csr" => CSR + case "myriad" => Myriad + case "amd" => AMD + case "mesa" => Mesa + case "suse" => SUSE + case "oe" => OpenEmbedded + case _ => Unknown + } + + final val Unknown = "unknown" + final val AMD = "amd" + final val Apple = "apple" + final val CSR = "csr" + final val 
Freescale = "fsl" + final val IBM = "ibm" + final val ImaginationTechnologies = "img" + final val Mesa = "mesa" + final val MipsTechnologies = "mti" + final val Myriad = "myriad" + final val NVIDIA = "nvidia" + final val OpenEmbedded = "oe" + final val PC = "pc" + final val SCEI = "scei" + final val SUSE = "suse" + } + + object OS { + def parse(str: String): String = str match { + case os if os.startsWith("ananas") => Ananas + case os if os.startsWith("cloudabi") => CloudABI + case os if os.startsWith("darwin") => Darwin + case os if os.startsWith("dragonfly") => DragonFly + case os if os.startsWith("freebsd") => FreeBSD + case os if os.startsWith("fuchsia") => Fuchsia + case os if os.startsWith("ios") => IOS + case os if os.startsWith("kfreebsd") => KFreeBSD + case os if os.startsWith("linux") => Linux + case os if os.startsWith("lv2") => Lv2 + case os if os.startsWith("macos") => MacOSX + case os if os.startsWith("netbsd") => NetBSD + case os if os.startsWith("openbsd") => OpenBSD + case os if os.startsWith("solaris") => Solaris + case os if os.startsWith("win32") => Win32 + case os if os.startsWith("windows") => Win32 + case os if os.startsWith("zos") => ZOS + case os if os.startsWith("haiku") => Haiku + case os if os.startsWith("minix") => Minix + case os if os.startsWith("rtems") => RTEMS + case os if os.startsWith("nacl") => NaCl + case os if os.startsWith("aix") => AIX + case os if os.startsWith("cuda") => CUDA + case os if os.startsWith("nvcl") => NVCL + case os if os.startsWith("amdhsa") => AMDHSA + case os if os.startsWith("ps4") => PS4 + case os if os.startsWith("ps5") => PS5 + case os if os.startsWith("elfiamcu") => ELFIAMCU + case os if os.startsWith("tvos") => TvOS + case os if os.startsWith("watchos") => WatchOS + case os if os.startsWith("driverkit") => DriverKit + case os if os.startsWith("mesa3d") => Mesa3D + case os if os.startsWith("contiki") => Contiki + case os if os.startsWith("amdpal") => AMDPAL + case os if os.startsWith("hermit") => 
HermitCore + case os if os.startsWith("hurd") => Hurd + case os if os.startsWith("wasi") => WASI + case os if os.startsWith("emscripten") => Emscripten + case os if os.startsWith("shadermodel") => ShaderModel + case os if os.startsWith("liteos") => LiteOS + case _ => Unknown + } + + final val Unknown = "unknown" + final val AIX = "aix" + final val AMDHSA = "amdhsa" + final val AMDPAL = "amdpal" + final val Ananas = "ananas" + final val CUDA = "cuda" + final val CloudABI = "cloudabi" + final val Contiki = "contiki" + final val Darwin = "darwin" + final val DragonFly = "dragonfly" + final val DriverKit = "driverkit" + final val ELFIAMCU = "elfiamcu" + final val Emscripten = "emscripten" + final val FreeBSD = "freebsd" + final val Fuchsia = "fuchsia" + final val Haiku = "haiku" + final val HermitCore = "hermit" + final val Hurd = "hurd" + final val IOS = "ios" + final val KFreeBSD = "kfreebsd" + final val Linux = "linux" + final val Lv2 = "lv2" + final val MacOSX = "macosx" + final val Mesa3D = "mesa3d" + final val Minix = "minix" + final val NVCL = "nvcl" + final val NaCl = "nacl" + final val NetBSD = "netbsd" + final val OpenBSD = "openbsd" + final val PS4 = "ps4" + final val PS5 = "ps5" + final val RTEMS = "rtems" + final val Solaris = "solaris" + final val TvOS = "tvos" + final val WASI = "wasi" + final val WatchOS = "watchos" + final val Win32 = "windows" + final val ZOS = "zos" + final val ShaderModel = "shadermodel" + final val LiteOS = "liteos" + } + + object Env { + def parse(str: String): String = str match { + case env if env.startsWith("eabihf") => EABIHF + case env if env.startsWith("eabi") => EABI + case env if env.startsWith("gnuabin32") => GNUABIN32 + case env if env.startsWith("gnuabi64") => GNUABI64 + case env if env.startsWith("gnueabihf") => GNUEABIHF + case env if env.startsWith("gnueabi") => GNUEABI + case env if env.startsWith("gnuf32") => GNUF32 + case env if env.startsWith("gnuf64") => GNUF64 + case env if env.startsWith("gnusf") => GNUSF + 
case env if env.startsWith("gnux32") => GNUX32 + case env if env.startsWith("gnu_ilp32") => GNUILP32 + case env if env.startsWith("code16") => CODE16 + case env if env.startsWith("gnu") => GNU + case env if env.startsWith("android") => Android + case env if env.startsWith("musleabihf") => MuslEABIHF + case env if env.startsWith("musleabi") => MuslEABI + case env if env.startsWith("muslx32") => MuslX32 + case env if env.startsWith("musl") => Musl + case env if env.startsWith("msvc") => MSVC + case env if env.startsWith("itanium") => Itanium + case env if env.startsWith("cygnus") => Cygnus + case env if env.startsWith("coreclr") => CoreCLR + case env if env.startsWith("simulator") => Simulator + case env if env.startsWith("macabi") => MacABI + case env if env.startsWith("pixel") => Pixel + case env if env.startsWith("vertex") => Vertex + case env if env.startsWith("geometry") => Geometry + case env if env.startsWith("hull") => Hull + case env if env.startsWith("domain") => Domain + case env if env.startsWith("compute") => Compute + case env if env.startsWith("library") => Library + case env if env.startsWith("raygeneration") => RayGeneration + case env if env.startsWith("intersection") => Intersection + case env if env.startsWith("anyhit") => AnyHit + case env if env.startsWith("closesthit") => ClosestHit + case env if env.startsWith("miss") => Miss + case env if env.startsWith("callable") => Callable + case env if env.startsWith("mesh") => Mesh + case env if env.startsWith("amplification") => Amplification + case env if env.startsWith("ohos") => OpenHOS + case _ => Unknown + } + + final val Unknown = "unknown" + final val Android = "android" + final val CODE16 = "code16" + final val CoreCLR = "coreclr" + final val Cygnus = "cygnus" + final val EABI = "eabi" + final val EABIHF = "eabihf" + final val GNU = "gnu" + final val GNUABI64 = "gnuabi64" + final val GNUABIN32 = "gnuabin32" + final val GNUEABI = "gnueabi" + final val GNUEABIHF = "gnueabihf" + final val GNUF32 = 
"gnuf32" + final val GNUF64 = "gnuf64" + final val GNUSF = "gnusf" + final val GNUX32 = "gnux32" + final val GNUILP32 = "gnu_ilp32" + final val Itanium = "itanium" + final val MSVC = "msvc" + final val MacABI = "macabi" + final val Musl = "musl" + final val MuslEABI = "musleabi" + final val MuslEABIHF = "musleabihf" + final val MuslX32 = "muslx32" + final val Simulator = "simulator" + final val Pixel = "pixel" + final val Vertex = "vertex" + final val Geometry = "geometry" + final val Hull = "hull" + final val Domain = "domain" + final val Compute = "compute" + final val Library = "library" + final val RayGeneration = "raygeneration" + final val Intersection = "intersection" + final val AnyHit = "anyhit" + final val ClosestHit = "closesthit" + final val Miss = "miss" + final val Callable = "callable" + final val Mesh = "mesh" + final val Amplification = "amplification" + final val OpenHOS = "ohos" + } + +} diff --git a/tools/src/main/scala/scala/scalanative/build/core/ScalaNative.scala b/tools/src/main/scala/scala/scalanative/build/core/ScalaNative.scala index 0b9a6961d6..e400e59945 100644 --- a/tools/src/main/scala/scala/scalanative/build/core/ScalaNative.scala +++ b/tools/src/main/scala/scala/scalanative/build/core/ScalaNative.scala @@ -30,7 +30,7 @@ private[scalanative] object ScalaNative { scope: Scope ): linker.Result = dump(config, "linked") { - check(config) { + check(config, forceQuickCheck = true) { config.logger.time("Linking")(Link(config, entries)) } } @@ -102,59 +102,32 @@ private[scalanative] object ScalaNative { /** Run NIR checker on the linker result. 
*/ def check( config: Config + )(linked: scalanative.linker.Result): scalanative.linker.Result = + check(config, forceQuickCheck = false)(linked) + + private def check( + config: Config, + forceQuickCheck: Boolean )(linked: scalanative.linker.Result): scalanative.linker.Result = { - if (config.check) { - config.logger.time("Checking intermediate code") { - def warn(s: String) = - if (config.compilerConfig.checkFatalWarnings) config.logger.error(s) - else config.logger.warn(s) - val errors = Check(linked) + val performFullCheck = config.check + val checkMode = if (performFullCheck) "full" else "quick" + if (config.check || forceQuickCheck) { + config.logger.time(s"Checking intermediate code ($checkMode)") { + val fatalWarnings = config.compilerConfig.checkFatalWarnings + val errors = + if (performFullCheck) Check(linked) + else Check.quick(linked) if (errors.nonEmpty) { - val grouped = - mutable.Map.empty[Global, mutable.UnrolledBuffer[Check.Error]] - errors.foreach { err => - val errs = - grouped.getOrElseUpdate(err.name, mutable.UnrolledBuffer.empty) - errs += err - } - grouped.foreach { - case (name, errs) => - warn("") - warn(s"Found ${errs.length} errors on ${name.show} :") - warn("") - linked.defns - .collectFirst { - case defn if defn != null && defn.name == name => defn - } - .foreach { defn => - val str = defn.show - val lines = str.split("\n") - lines.zipWithIndex.foreach { - case (line, idx) => - warn( - String - .format( - " %04d ", - java.lang.Integer.valueOf(idx) - ) + line - ) - } - } - warn("") - errs.foreach { err => - warn(" in " + err.ctx.reverse.mkString(" / ") + " : ") - warn(" " + err.msg) - } - - } - warn("") - warn(s"${errors.size} errors found") - - if (config.compilerConfig.checkFatalWarnings) { + showErrors( + log = + if (fatalWarnings) config.logger.error(_) + else config.logger.warn(_), + showContext = performFullCheck + )(errors, linked) + if (fatalWarnings) throw new BuildException( "Fatal warning(s) found; see the error output for 
details." ) - } } } } @@ -162,6 +135,39 @@ private[scalanative] object ScalaNative { linked } + private def showErrors( + log: String => Unit, + showContext: Boolean + )(errors: Seq[Check.Error], linked: linker.Result): Unit = { + errors + .groupBy(_.name) + .foreach { + case (name, errs) => + log(s"\nFound ${errs.length} errors on ${name.show} :") + def showError(err: Check.Error): Unit = log(" " + err.msg) + if (showContext) { + linked.defns + .collectFirst { + case defn if defn != null && defn.name == name => defn + } + .foreach { defn => + val str = defn.show + val lines = str.split("\n") + lines.zipWithIndex.foreach { + case (line, idx) => + log(String.format(" %04d ", Integer.valueOf(idx)) + line) + } + } + log("") + errs.foreach { err => + log(" in " + err.ctx.reverse.mkString(" / ") + " : ") + showError(err) + } + } else errs.foreach(showError) + } + log(s"\n${errors.size} errors found") + } + def dump(config: Config, phase: String)( linked: scalanative.linker.Result ): scalanative.linker.Result = { diff --git a/tools/src/main/scala/scala/scalanative/checker/Check.scala b/tools/src/main/scala/scala/scalanative/checker/Check.scala index f84b90171a..a847aaa01f 100644 --- a/tools/src/main/scala/scala/scalanative/checker/Check.scala +++ b/tools/src/main/scala/scala/scalanative/checker/Check.scala @@ -7,25 +7,11 @@ import scalanative.linker._ import scalanative.util.partitionBy import scalanative.compat.CompatParColls.Converters._ -final class Check(implicit linked: linker.Result) { +sealed abstract class NIRCheck(implicit linked: linker.Result) { val errors = mutable.UnrolledBuffer.empty[Check.Error] - - val labels = mutable.Map.empty[Local, Seq[Type]] - val env = mutable.Map.empty[Local, Type] - var name: Global = Global.None - var retty: Type = Type.Unit var ctx: List[String] = Nil - def in[T](entry: String)(f: => T): T = { - try { - ctx = entry :: ctx - f - } finally { - ctx = ctx.tail - } - } - def ok: Unit = () def error(msg: String): Unit = @@ -46,13 
+32,63 @@ final class Check(implicit linked: linker.Result) { } def checkInfo(info: Info): Unit = info match { - case meth: Method => - checkMethod(meth) - case _ => - ok + case meth: Method => checkMethod(meth) + case _ => ok + } + + def checkMethod(meth: Method): Unit + + final protected def checkFieldOp(op: Op.Field): Unit = { + val Op.Field(obj, name) = op + obj.ty match { + case ScopeRef(scope) => + scope.implementors.foreach { cls => + if (cls.fields.exists(_.name == name)) ok + else error(s"can't acces field '${name.show}' in ${cls.name.show}") + } + case ty => error(s"can't access fields of a non-class type ${ty.show}") + } } - def checkMethod(meth: Method): Unit = { + final protected def checkMethodOp(op: Op.Method): Unit = { + val Op.Method(obj, sig) = op + expect(Rt.Object, obj) + sig match { + case sig if sig.isMethod || sig.isCtor || sig.isGenerated => ok + case _ => error(s"method must take a method signature, not ${sig.show}") + } + + def checkCallable(cls: Class): Unit = + if (cls.allocated && cls.resolve(sig).isEmpty) { + error(s"can't call ${sig.show} on ${cls.name.show}") + } + + obj.ty match { + case Type.Null => ok + case ScopeRef(info) if sig.isVirtual => + info.implementors.foreach(checkCallable) + case ClassRef(info) => + checkCallable(info) + case ty => error(s"can't resolve method on ${ty.show}") + } + } +} + +final class Check(implicit linked: linker.Result) extends NIRCheck { + val labels = mutable.Map.empty[Local, Seq[Type]] + val env = mutable.Map.empty[Local, Type] + + var retty: Type = Type.Unit + + def in[T](entry: String)(f: => T): T = { + try { + ctx = entry :: ctx + f + } finally { + ctx = ctx.tail + } + } + override def checkMethod(meth: Method): Unit = { val Type.Function(_, methRetty) = meth.ty: @unchecked retty = methRetty @@ -196,43 +232,8 @@ final class Check(implicit linked: linker.Result) { checkFieldOp(ty, obj, name, None) case Op.Fieldstore(ty, obj, name, value) => checkFieldOp(ty, obj, name, Some(value)) - case 
Op.Field(obj, name) => - obj.ty match { - case ScopeRef(scope) => - scope.implementors.foreach { cls => - if (cls.fields.exists(_.name == name)) ok - else error(s"can't acces field '${name.show}' in ${cls.name.show}") - } - case ty => - error(s"can't access fields of a non-class type ${ty.show}") - } - case Op.Method(obj, sig) => - expect(Rt.Object, obj) - sig match { - case sig if sig.isMethod || sig.isCtor || sig.isGenerated => - ok - case _ => - error(s"method must take a method signature, not ${sig.show}") - } - - def checkCallable(cls: Class): Unit = { - if (cls.allocated) { - if (cls.resolve(sig).isEmpty) { - error(s"can't call ${sig.show} on ${cls.name.show}") - } - } - } - - obj.ty match { - case Type.Null => - ok - case ScopeRef(info) if sig.isVirtual => - info.implementors.foreach(checkCallable) - case ClassRef(info) => - checkCallable(info) - case ty => - error(s"can't resolve method on ${ty.show}") - } + case op: Op.Field => checkFieldOp(op) + case op: Op.Method => checkMethodOp(op) case Op.Dynmethod(obj, sig) => expect(Rt.Object, obj) sig match { @@ -672,18 +673,43 @@ final class Check(implicit linked: linker.Result) { } } +final class QuickCheck(implicit linked: linker.Result) extends NIRCheck { + override def checkMethod(meth: Method): Unit = { + meth.insts.foreach(checkInst) + } + + def checkInst(inst: Inst): Unit = inst match { + case Inst.Let(_, op, _) => checkOp(op) + case _ => ok + } + + def checkOp(op: Op): Unit = op match { + case op: Op.Field => checkFieldOp(op) + case op: Op.Method => checkMethodOp(op) + case _ => ok + } + +} + object Check { final case class Error(name: Global, ctx: List[String], msg: String) - def apply(linked: linker.Result): Seq[Error] = + private def run( + checkImpl: linker.Result => NIRCheck + )(linked: linker.Result): Seq[Error] = partitionBy(linked.infos.values.toSeq)(_.name).par .map { case (_, infos) => - val check = new Check()(linked) + val check = checkImpl(linked) check.run(infos) check.errors } .seq .flatten 
.toSeq + + def apply(linked: linker.Result): Seq[Error] = + run(new Check()(_))(linked) + def quick(linked: linker.Result): Seq[Error] = + run(new QuickCheck()(_))(linked) } diff --git a/tools/src/main/scala/scala/scalanative/codegen/BitMatrix.scala b/tools/src/main/scala/scala/scalanative/codegen/BitMatrix.scala new file mode 100644 index 0000000000..57d1577886 --- /dev/null +++ b/tools/src/main/scala/scala/scalanative/codegen/BitMatrix.scala @@ -0,0 +1,26 @@ +package scala.scalanative + +private[scalanative] class BitMatrix private ( + bits: Array[Int], + columns: Int +) { + import BitMatrix.{AddressBitsPerWord, ElementSize, RightBits} + + def set(row: Int, col: Int): Unit = { + val bitIndex = row * columns + col + bits(bitIndex >> AddressBitsPerWord) |= 1 << (bitIndex & RightBits) + } + + def toSeq = bits.toSeq +} +private[scalanative] object BitMatrix { + private[scalanative] final val AddressBitsPerWord = 5 // Int Based 2^5 = 32 + private[scalanative] final val ElementSize = 1 << AddressBitsPerWord + private[scalanative] final val RightBits = ElementSize - 1 + + def apply(rows: Int, columns: Int): BitMatrix = { + val nbits = rows * columns + val length = (nbits + RightBits) >> AddressBitsPerWord + new BitMatrix(new Array[Int](length), columns) + } +} diff --git a/tools/src/main/scala/scala/scalanative/codegen/Generate.scala b/tools/src/main/scala/scala/scalanative/codegen/Generate.scala index 281ed2c15d..8e9006fc24 100644 --- a/tools/src/main/scala/scala/scalanative/codegen/Generate.scala +++ b/tools/src/main/scala/scala/scalanative/codegen/Generate.scala @@ -9,6 +9,14 @@ import scala.scalanative.build.Logger object Generate { import Impl._ + val ClassHasTraitName = + Global.Member(rttiModule, Sig.Extern("__check_class_has_trait")) + val ClassHasTraitSig = Type.Function(Seq(Type.Int, Type.Int), Type.Bool) + + val TraitHasTraitName = + Global.Member(rttiModule, Sig.Extern("__check_trait_has_trait")) + val TraitHasTraitSig = Type.Function(Seq(Type.Int, 
Type.Int), Type.Bool) + def apply(entry: Option[Global.Top], defns: Seq[Defn])(implicit meta: Metadata ): Seq[Defn] = @@ -66,27 +74,112 @@ object Generate { } def genClassHasTrait(): Unit = { + genHasTrait( + ClassHasTraitName, + ClassHasTraitSig, + meta.hasTraitTables.classHasTraitTy, + meta.hasTraitTables.classHasTraitVal + ) + } + + def genTraitHasTrait(): Unit = { + genHasTrait( + TraitHasTraitName, + TraitHasTraitSig, + meta.hasTraitTables.traitHasTraitTy, + meta.hasTraitTables.traitHasTraitVal + ) + } + + // BitMatrix get adapted from the java.util.BitSet implementation. + // Equivalent to the following Scala code: + // def get_[class,trait]_has_trait(firstid: Int, secondid: Int): Boolean = { + // val bitIndex = firstid * meta.traits.length + secondid + // (table(bitIndex >> AddressBitsPerWord) & (1 << (bitIndex & RightBits))) != 0 + // } + private def genHasTrait( + name: Global.Member, + sig: Type.Function, + tableTy: Type, + tableVal: Val + ): Unit = { implicit val fresh = Fresh() - val classid, traitid = Val.Local(fresh(), Type.Int) - val boolptr = Val.Local(fresh(), Type.Ptr) + val firstid, secondid = Val.Local(fresh(), Type.Int) + val row = Val.Local(fresh(), Type.Int) + val columns = Val.Int(meta.traits.length) + val bitIndex = Val.Local(fresh(), Type.Int) + val arrayPos = Val.Local(fresh(), Type.Int) + val intptr = Val.Local(fresh(), Type.Ptr) + val int = Val.Local(fresh(), Type.Int) + val toShift = Val.Local(fresh(), Type.Int) + val mask = Val.Local(fresh(), Type.Int) + val and = Val.Local(fresh(), Type.Int) val result = Val.Local(fresh(), Type.Bool) + def let(local: Val.Local, op: Op) = Inst.Let(local.name, op, Next.None) + buf += Defn.Define( Attrs(inlineHint = Attr.AlwaysInline), - ClassHasTraitName, - ClassHasTraitSig, + name, + sig, Seq( - Inst.Label(fresh(), Seq(classid, traitid)), - Inst.Let( - boolptr.name, + Inst.Label(fresh(), Seq(firstid, secondid)), + let(row, Op.Bin(Bin.Imul, Type.Int, firstid, columns)), + let(bitIndex, Op.Bin(Bin.Iadd, 
Type.Int, row, secondid)), + let( + arrayPos, + Op.Bin( + Bin.Ashr, + Type.Int, + bitIndex, + Val.Int(BitMatrix.AddressBitsPerWord) + ) + ), + let( + intptr, Op.Elem( - meta.hasTraitTables.classHasTraitTy, - meta.hasTraitTables.classHasTraitVal, - Seq(Val.Int(0), classid, traitid) - ), - Next.None + tableTy, + tableVal, + Seq(Val.Int(0), arrayPos) + ) + ), + let(int, Op.Load(Type.Int, intptr)), + let( + toShift, + Op.Bin( + Bin.And, + Type.Int, + bitIndex, + Val.Int(BitMatrix.RightBits) + ) + ), + let( + mask, + Op.Bin( + Bin.Shl, + Type.Int, + Val.Int(1), + toShift + ) + ), + let( + and, + Op.Bin( + Bin.And, + Type.Int, + int, + mask + ) + ), + let( + result, + Op.Comp( + Comp.Ine, + Type.Int, + and, + Val.Int(0) + ) ), - Inst.Let(result.name, Op.Load(Type.Bool, boolptr), Next.None), Inst.Ret(result) ) ) @@ -100,33 +193,6 @@ object Generate { } } - def genTraitHasTrait(): Unit = { - implicit val fresh = Fresh() - val leftid, rightid = Val.Local(fresh(), Type.Int) - val boolptr = Val.Local(fresh(), Type.Ptr) - val result = Val.Local(fresh(), Type.Bool) - - buf += Defn.Define( - Attrs(inlineHint = Attr.AlwaysInline), - TraitHasTraitName, - TraitHasTraitSig, - Seq( - Inst.Label(fresh(), Seq(leftid, rightid)), - Inst.Let( - boolptr.name, - Op.Elem( - meta.hasTraitTables.traitHasTraitTy, - meta.hasTraitTables.traitHasTraitVal, - Seq(Val.Int(0), leftid, rightid) - ), - Next.None - ), - Inst.Let(result.name, Op.Load(Type.Bool, boolptr), Next.None), - Inst.Ret(result) - ) - ) - } - /* Generate set of instructions using common exception handling, generate method * would return 0 if would execute successfully exception and 1 in otherwise */ private def withExceptionHandler( @@ -448,14 +514,6 @@ object Generate { private object Impl { val rttiModule = Global.Top("java.lang.rtti$") - val ClassHasTraitName = - Global.Member(rttiModule, Sig.Extern("__check_class_has_trait")) - val ClassHasTraitSig = Type.Function(Seq(Type.Int, Type.Int), Type.Bool) - - val TraitHasTraitName = - 
Global.Member(rttiModule, Sig.Extern("__check_trait_has_trait")) - val TraitHasTraitSig = Type.Function(Seq(Type.Int, Type.Int), Type.Bool) - val ObjectArray = Type.Ref(Global.Top("scala.scalanative.runtime.ObjectArray")) diff --git a/tools/src/main/scala/scala/scalanative/codegen/HasTraitTables.scala b/tools/src/main/scala/scala/scalanative/codegen/HasTraitTables.scala index 4fea7bc434..2ed2a13bc0 100644 --- a/tools/src/main/scala/scala/scalanative/codegen/HasTraitTables.scala +++ b/tools/src/main/scala/scala/scalanative/codegen/HasTraitTables.scala @@ -7,12 +7,12 @@ import scalanative.linker.{Trait, Class} class HasTraitTables(meta: Metadata) { private implicit val pos: Position = Position.NoPosition - val classHasTraitName = Global.Top("__class_has_trait") + private val classHasTraitName = Global.Top("__class_has_trait") val classHasTraitVal = Val.Global(classHasTraitName, Type.Ptr) var classHasTraitTy: Type = _ var classHasTraitDefn: Defn = _ - val traitHasTraitName = Global.Top("__trait_has_trait") + private val traitHasTraitName = Global.Top("__trait_has_trait") val traitHasTraitVal = Val.Global(traitHasTraitName, Type.Ptr) var traitHasTraitTy: Type = _ var traitHasTraitDefn: Defn = _ @@ -20,43 +20,44 @@ class HasTraitTables(meta: Metadata) { initClassHasTrait() initTraitHasTrait() - def markTraits(row: Array[Boolean], cls: Class): Unit = { - cls.traits.foreach(markTraits(row, _)) - cls.parent.foreach(markTraits(row, _)) + private def markTraits(matrix: BitMatrix, row: Int, cls: Class): Unit = { + cls.traits.foreach(markTraits(matrix, row, _)) + cls.parent.foreach(markTraits(matrix, row, _)) } - def markTraits(row: Array[Boolean], trt: Trait): Unit = { - row(meta.ids(trt)) = true - trt.traits.foreach { right => row(meta.ids(right)) = true } - trt.traits.foreach(markTraits(row, _)) + private def markTraits(matrix: BitMatrix, row: Int, trt: Trait): Unit = { + matrix.set(row, meta.ids(trt)) + trt.traits.foreach(markTraits(matrix, row, _)) } - def 
initClassHasTrait(): Unit = { - val columns = meta.classes.map { cls => - val row = new Array[Boolean](meta.traits.length) - markTraits(row, cls) - Val.ArrayValue(Type.Bool, row.toSeq.map(Val.Bool)) + private def initClassHasTrait(): Unit = { + val matrix = BitMatrix(meta.classes.length, meta.traits.length) + var row = 0 + meta.classes.foreach { cls => + markTraits(matrix, row, cls) + + row += 1 } - val table = - Val.ArrayValue(Type.ArrayValue(Type.Bool, meta.traits.length), columns) + val tableVal = Val.ArrayValue(Type.Int, matrix.toSeq.map(i => Val.Int(i))) - classHasTraitTy = table.ty classHasTraitDefn = - Defn.Const(Attrs.None, classHasTraitName, table.ty, table) + Defn.Const(Attrs.None, classHasTraitName, tableVal.ty, tableVal) + classHasTraitTy = tableVal.ty } - def initTraitHasTrait(): Unit = { - val columns = meta.traits.map { left => - val row = new Array[Boolean](meta.traits.length) - markTraits(row, left) - row(meta.ids(left)) = true - Val.ArrayValue(Type.Bool, row.toSeq.map(Val.Bool)) + private def initTraitHasTrait(): Unit = { + val matrix = BitMatrix(meta.traits.length, meta.traits.length) + var row = 0 + meta.traits.foreach { left => + markTraits(matrix, row, left) + matrix.set(row, meta.ids(left)) + + row += 1 } - val table = - Val.ArrayValue(Type.ArrayValue(Type.Bool, meta.traits.length), columns) + val tableVal = Val.ArrayValue(Type.Int, matrix.toSeq.map(l => Val.Int(l))) - traitHasTraitTy = table.ty traitHasTraitDefn = - Defn.Const(Attrs.None, traitHasTraitName, table.ty, table) + Defn.Const(Attrs.None, traitHasTraitName, tableVal.ty, tableVal) + traitHasTraitTy = tableVal.ty } } diff --git a/tools/src/main/scala/scala/scalanative/codegen/Lower.scala b/tools/src/main/scala/scala/scalanative/codegen/Lower.scala index 969a8159c8..78d648c3f8 100644 --- a/tools/src/main/scala/scala/scalanative/codegen/Lower.scala +++ b/tools/src/main/scala/scala/scalanative/codegen/Lower.scala @@ -758,16 +758,17 @@ object Lower { unwind ) val id = 
let(Op.Load(Type.Int, idptr), unwind) - val boolptr = let( - Op.Elem( - hasTraitTables.classHasTraitTy, - hasTraitTables.classHasTraitVal, - Seq(zero, id, Val.Int(meta.ids(trt))) + let( + Op.Call( + Generate.ClassHasTraitSig, + Val.Global( + Generate.ClassHasTraitName, + Generate.ClassHasTraitSig + ), + Seq(id, Val.Int(meta.ids(trt))) ), unwind ) - let(Op.Load(Type.Bool, boolptr), unwind) - case _ => util.unsupported(s"is[$ty] $obj") } diff --git a/tools/src/main/scala/scala/scalanative/linker/LinktimeValueResolver.scala b/tools/src/main/scala/scala/scalanative/linker/LinktimeValueResolver.scala index 77ff27dacd..3a3f488cb6 100644 --- a/tools/src/main/scala/scala/scalanative/linker/LinktimeValueResolver.scala +++ b/tools/src/main/scala/scala/scalanative/linker/LinktimeValueResolver.scala @@ -3,12 +3,14 @@ package scala.scalanative.linker import scala.collection.mutable import scala.scalanative.nir._ import scala.scalanative.build._ +import scala.scalanative.util.unsupported trait LinktimeValueResolver { self: Reach => import LinktimeValueResolver._ private lazy val linktimeProperties = { val conf = config.compilerConfig + val triple = conf.configuredOrDetectedTriple val linktimeInfo = "scala.scalanative.meta.linktimeinfo" val predefined: NativeConfig.LinktimeProperites = Map( s"$linktimeInfo.debugMode" -> (conf.mode == Mode.debug), @@ -22,7 +24,11 @@ trait LinktimeValueResolver { self: Reach => conf.gc == GC.Commix }, s"$linktimeInfo.isMsys" -> Platform.isMsys, - s"$linktimeInfo.isCygwin" -> Platform.isCygwin + s"$linktimeInfo.isCygwin" -> Platform.isCygwin, + s"$linktimeInfo.target.arch" -> triple.arch, + s"$linktimeInfo.target.vendor" -> triple.vendor, + s"$linktimeInfo.target.os" -> triple.os, + s"$linktimeInfo.target.env" -> triple.env ) NativeConfig.checkLinktimeProperties(predefined) predefined ++ conf.linktimeProperties @@ -37,8 +43,38 @@ trait LinktimeValueResolver { self: Reach => protected def resolveLinktimeDefine(defn: Defn.Define): Defn.Define = { 
implicit def position: Position = defn.pos - if (!defn.insts.exists(shouldResolveInst)) defn - else { + def evaluated() = { + implicit val fresh = Fresh() + lazy val buf = { + val buf = new Buffer() + buf += defn.insts.head + buf + } + + defn.insts match { + case Seq(_, Inst.Ret(_)) => defn + + case Seq( + _, + Inst.Let(_, ReferencedPropertyOp(propertyName), _), + Inst.Ret(_) + ) => + val value = resolveLinktimeProperty(propertyName) + resolvedValues.getOrElseUpdate(propertyName, value) + buf.ret(value.nirValue) + defn.copy(insts = buf.toSeq) + + case _ => + val mangledName = Mangle(defn.name) + val value = resolveLinktimeProperty(mangledName) + buf.ret(value.nirValue) + resolvedValues.getOrElseUpdate(mangledName, value) + defn.copy(insts = buf.toSeq) + } + + } + + def partiallyEvaluated() = { val resolvedInsts = ControlFlow.removeDeadBlocks { defn.insts.map { case inst: Inst.LinktimeIf => resolveLinktimeIf(inst) @@ -51,12 +87,49 @@ trait LinktimeValueResolver { self: Reach => defn.copy(insts = resolvedInsts) } - } - protected def shouldResolveInst(inst: Inst): Boolean = inst match { - case _: Inst.LinktimeIf => true - case Inst.Let(_, ReferencedPropertyOp(_), _) => true - case _ => false + def isRuntimeOnly(inst: Inst): Boolean = inst match { + case Inst.Label(_, _) => false + case Inst.LinktimeIf(_, _, _) => false + case Inst.Jump(_: Next.Label) => false + case Inst.Ret(_) => false + case Inst.Let(_, op, Next.None) => + op match { + case Op.Call(_, Val.Global(name, _), _) => + name != Linktime.PropertyResolveFunctionName && + !lookup(name).exists(_.attrs.isLinktimeResolved) + case _: Op.Comp => false + case _ => true + } + case _ => true + } + + def canBeEvauluated = + !defn.insts.exists(isRuntimeOnly) && { + defn.ty match { + case Type.Function(_, retty) => + retty match { + case _: Type.ValueKind => true + case Type.Ref(name, _, _) => name == Rt.String.name + case Type.Null => true + case _ => false + } + case _ => false + } + } + + def hasLinktimeResolvedInsts 
= defn.insts.exists { + case _: Inst.LinktimeIf => true + case Inst.Let(_, ReferencedPropertyOp(_), _) => true + case _ => false + } + + if (defn.attrs.isLinktimeResolved) + if (canBeEvauluated) evaluated() + else partiallyEvaluated() + else if (hasLinktimeResolvedInsts) // Legacy variant for 0.4.12- compat + partiallyEvaluated() + else defn } private def resolveLinktimeProperty(name: String)(implicit @@ -67,9 +140,30 @@ trait LinktimeValueResolver { self: Reach => private def lookupLinktimeProperty( propertyName: String )(implicit pos: Position): LinktimeValue = { - linktimeProperties - .get(propertyName) - .map(ComparableVal.fromAny(_).asAny) + def fromProvidedValue = + linktimeProperties + .get(propertyName) + .map(ComparableVal.fromAny(_).asAny) + + def fromCalculatedValue = + util + .Try(Unmangle.unmangleGlobal(propertyName)) + .toOption + .flatMap(lookup(_)) + .collect { + case defn: Defn.Define if defn.attrs.isLinktimeResolved => + try interpretLinktimeDefn(defn) + catch { + case ex: Exception => + throw new LinkingException( + s"Link-time method `$propertyName` cannot be interpreted at linktime" + ) + } + } + .map(ComparableVal.fromNir) + + fromProvidedValue + .orElse(fromCalculatedValue) .getOrElse { throw new LinkingException( s"Link-time property named `$propertyName` not defined in the config" @@ -123,7 +217,7 @@ trait LinktimeValueResolver { self: Reach => private def resolveLinktimeIf( inst: Inst.LinktimeIf - )(implicit pos: Position): Inst = { + )(implicit pos: Position): Inst.Jump = { val Inst.LinktimeIf(cond, thenp, elsep) = inst val matchesCondition = resolveCondition(cond) @@ -131,6 +225,58 @@ trait LinktimeValueResolver { self: Reach => else Inst.Jump(elsep) } + private def interpretLinktimeDefn(defn: Defn.Define): Val = { + require(defn.attrs.isLinktimeResolved) + val cf = ControlFlow.Graph(defn.insts) + val locals = scala.collection.mutable.Map.empty[Val.Local, Val] + + def resolveLocalVal(local: Val.Local): Val = locals(local) match { + case 
v: Val.Local => resolveLocalVal(v) + case value => value + } + + def interpretBlock(block: ControlFlow.Block): Val = { + def interpret(inst: Inst): Val = inst match { + case Inst.Ret(value) => + value match { + case v: Val.Local => resolveLocalVal(v) + case _ => value + } + + case Inst.Jump(next) => + val nextBlock = cf.find(next.name) + next match { + case Next.Label(_, values) => + locals ++= nextBlock.params.zip(values).toMap + case _ => + unsupported( + "Only normal labels are expected in linktime resolved methods" + ) + } + interpretBlock(nextBlock) + + case Inst.Label(next, params) => + val insts = cf.find(next).insts + assert(insts.size == 1) + interpret(insts.head) + + case branch: Inst.LinktimeIf => + interpret(resolveLinktimeIf(branch)(branch.pos)) + + case _: Inst.If | _: Inst.Let | _: Inst.Switch | _: Inst.Throw | + _: Inst.Unreachable => + unsupported( + "Unexpected instruction found in linktime resolved method: " + inst + ) + } + + // Linktime resolved values always have blocks of size 1 + assert(block.insts.size == 1) + interpret(block.insts.head) + } + interpretBlock(cf.entry) + } + } private[linker] object LinktimeValueResolver { diff --git a/tools/src/test/scala-3/scala/scalanative/NIRCompilerTest3.scala b/tools/src/test/scala-3/scala/scalanative/NIRCompilerTest3.scala index 20a2926fb5..5b7aae1b7b 100644 --- a/tools/src/test/scala-3/scala/scalanative/NIRCompilerTest3.scala +++ b/tools/src/test/scala-3/scala/scalanative/NIRCompilerTest3.scala @@ -140,27 +140,57 @@ class NIRCompilerTest3 extends AnyFlatSpec with Matchers with Inspectors { |""".stripMargin ) - it should "allow to report error if function passed to CFuncPtr.fromScalaFunction is not inlineable" in { + it should "report error when inlining extern function" in { intercept[CompilationFailedException] { NIRCompiler(_.compile(""" - |import scala.scalanative.unsafe.* - | - |opaque type Visitor = CFuncPtr1[Int, Int] - |object Visitor: - | def apply(f: Int => Int): Visitor = f - | - |@extern 
def useVisitor(x: Visitor): Unit = extern - | - |@main def test(n: Int): Unit = - | def callback(x: Int) = x*x + 2*n*n - | val visitor: Visitor = (n: Int) => n * 10 - | useVisitor(Visitor(callback)) - | useVisitor(Visitor(_ * 10)) - | useVisitor(visitor) - | - |""".stripMargin)) - }.getMessage should include( - "Function passed to method fromScalaFunction needs to be inlined" - ) + |import scala.scalanative.unsafe.* + | + |@extern object Foo{ + | inline def foo(): Int = extern + |} + |""".stripMargin)) + }.getMessage should include("Extern method cannot be inlined") + } + + it should "report error when inlining extern function in extern trait" in { + intercept[CompilationFailedException] { + NIRCompiler(_.compile(""" + |import scala.scalanative.unsafe.* + | + |@extern trait Foo{ + | inline def foo(): Int = extern + |} + |""".stripMargin)) + }.getMessage should include("Extern method cannot be inlined") + } + + it should "report error when inlining extern function in top-level" in { + intercept[CompilationFailedException] { + NIRCompiler(_.compile(""" + |import scala.scalanative.unsafe.* + | + |@extern inline def foo(): Int = extern + |""".stripMargin)) + }.getMessage should include("Extern method cannot be inlined") + } + + it should "report error when inlining exported function" in { + intercept[CompilationFailedException] { + NIRCompiler(_.compile(""" + |import scala.scalanative.unsafe.* + | + |@exported inline def foo(): Int = 42 + |""".stripMargin)) + }.getMessage should include("Exported method cannot be inlined") + } + + it should "report error when inlining exported field" in { + intercept[CompilationFailedException] { + NIRCompiler(_.compile(""" + |import scala.scalanative.unsafe.* + | + |@exportAccessors inline val foo: Int = 42 + |""".stripMargin)) + }.getMessage should include("Exported field cannot be inlined") } } diff --git a/tools/src/test/scala/scala/scalanative/LinkerSpec.scala b/tools/src/test/scala/scala/scalanative/LinkerSpec.scala index 
760777cfc4..380edec6d3 100644 --- a/tools/src/test/scala/scala/scalanative/LinkerSpec.scala +++ b/tools/src/test/scala/scala/scalanative/LinkerSpec.scala @@ -3,7 +3,7 @@ package scala.scalanative import scala.language.implicitConversions import java.io.File import java.nio.file.{Files, Path, Paths} -import scalanative.build.{Config, NativeConfig} +import scalanative.build.{Config, NativeConfig, Discover} import scalanative.build.core.ScalaNative import scalanative.util.Scope import org.scalatest.flatspec.AnyFlatSpec @@ -61,7 +61,12 @@ abstract class LinkerSpec extends AnyFlatSpec { .withWorkdir(outDir) .withClassPath(classpath.toSeq) .withMainClass(entry) - .withCompilerConfig(setupNativeConfig) + .withCompilerConfig(setupNativeConfig.compose(withDefaults)) + } + + private def withDefaults(config: NativeConfig): NativeConfig = { + config + .withTargetTriple("x86_64-unknown-unknown") } protected implicit def String2MapStringString( diff --git a/tools/src/test/scala/scala/scalanative/build/TargetTripleTest.scala b/tools/src/test/scala/scala/scalanative/build/TargetTripleTest.scala new file mode 100644 index 0000000000..5aee15405d --- /dev/null +++ b/tools/src/test/scala/scala/scalanative/build/TargetTripleTest.scala @@ -0,0 +1,37 @@ +package scala.scalanative.build + +import org.scalatest._ +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class TargetTripleTest extends AnyFlatSpec with Matchers { + + val cases = List( + "aarch64-unknown-linux-gnu" -> + TargetTriple("aarch64", "unknown", "linux", "gnu"), + "arm64-apple-darwin22.4.0" -> + TargetTriple("aarch64", "apple", "darwin", "unknown"), + "x86_64-apple-darwin13.4.0" -> + TargetTriple("x86_64", "apple", "darwin", "unknown"), + "x86_64-apple-darwin20.6.0" -> + TargetTriple("x86_64", "apple", "darwin", "unknown"), + "x86_64-apple-darwin21.6.0" -> + TargetTriple("x86_64", "apple", "darwin", "unknown"), + "x86_64-apple-darwin22.4.0" -> + TargetTriple("x86_64", "apple", 
"darwin", "unknown"), + "x86_64-pc-linux-gnu" -> + TargetTriple("x86_64", "pc", "linux", "gnu"), + "x86_64-pc-windows-msvc" -> + TargetTriple("x86_64", "pc", "windows", "msvc"), + "x86_64-portbld-freebsd13.1" -> + TargetTriple("x86_64", "unknown", "freebsd", "unknown") + ) + + "TargetTriple.parse" should "parse test cases" in { + cases.foreach { + case (triple, expected) => + TargetTriple.parse(triple) should equal(expected) + } + } + +} diff --git a/tools/src/test/scala/scala/scalanative/linker/LinktimeConditionsSpec.scala b/tools/src/test/scala/scala/scalanative/linker/LinktimeConditionsSpec.scala index 2e31a0d0c9..c71d50500d 100644 --- a/tools/src/test/scala/scala/scalanative/linker/LinktimeConditionsSpec.scala +++ b/tools/src/test/scala/scala/scalanative/linker/LinktimeConditionsSpec.scala @@ -51,10 +51,25 @@ class LinktimeConditionsSpec extends OptimizerSpec with Matchers { | } |}""".stripMargin - case class Entry[T](propertyName: String, value: T, lintimeValue: Val) + case class Entry[T](propertyName: String, value: T, linktimeValue: Val) - val defaultEntries = { + val ignoredNames = { val linktimeInfo = "scala.scalanative.meta.linktimeinfo" + Set( + s"$linktimeInfo.asanEnabled", + s"$linktimeInfo.is32BitPlatform", + "M36scala.scalanative.meta.LinktimeInfo$D9isFreeBSDzEO", + "M36scala.scalanative.meta.LinktimeInfo$D5isMaczEO", + "M36scala.scalanative.meta.LinktimeInfo$D9isWindowszEO", + s"$linktimeInfo.isWeakReferenceSupported", + s"$linktimeInfo.target.arch", + s"$linktimeInfo.target.vendor", + s"$linktimeInfo.target.os", + s"$linktimeInfo.target.env" + ) + } + + val defaultEntries = { Seq( Entry("int", 42, Val.Int(42)), Entry("bool", false, Val.False), @@ -62,9 +77,7 @@ class LinktimeConditionsSpec extends OptimizerSpec with Matchers { Entry("float", 3.14f, Val.Float(3.14f)), Entry("decimalSeparator", '-', Val.Char('-')), Entry("inner.countFrom", 123456L, Val.Long(123456L)), - Entry("secret.performance.multiplier", 9.99, Val.Double(9.99)), - // Always 
required linktime properties - Entry(s"$linktimeInfo.isWindows", false, Val.False) + Entry("secret.performance.multiplier", 9.99, Val.Double(9.99)) ) } val defaultProperties = defaultEntries.map(e => e.propertyName -> e.value) @@ -74,9 +87,11 @@ class LinktimeConditionsSpec extends OptimizerSpec with Matchers { "props.scala" -> props, "main.scala" -> allPropsUsage )(defaultProperties: _*) { (_, result) => + def normalized(seq: Iterable[String]): Set[String] = + seq.toSet.diff(ignoredNames) shouldContainAll( - defaultEntries.map(_.propertyName).toSet, - result.resolvedVals.keys + normalized(defaultEntries.map(_.propertyName)), + normalized(result.resolvedVals.keys) ) } } @@ -86,9 +101,14 @@ class LinktimeConditionsSpec extends OptimizerSpec with Matchers { "props.scala" -> props, "main.scala" -> allPropsUsage )(defaultProperties: _*) { (_, result) => + def normalized(elems: Map[String, Val]): Map[String, Val] = + elems.filter { case (key, _) => !ignoredNames.contains(key) } val expected = - for (e <- defaultEntries) yield e.propertyName -> e.lintimeValue - shouldContainAll(expected, result.resolvedVals) + defaultEntries.map { e => e.propertyName -> e.linktimeValue } + shouldContainAll( + normalized(expected.toMap), + normalized(result.resolvedVals.toMap) + ) } } @@ -407,6 +427,86 @@ class LinktimeConditionsSpec extends OptimizerSpec with Matchers { } } + it should "allow to define linktime methods calculated based on linktime values" in { + linkWithProps( + "props.scala" -> + """package scala.scalanative + | + |object props{ + | @scalanative.unsafe.resolvedAtLinktime("os") + | def os: String = scala.scalanative.unsafe.resolved + | + | @scalanative.unsafe.resolvedAtLinktime + | def isWindows: Boolean = os == "windows" + | + | @scalanative.unsafe.resolvedAtLinktime + | def isMac: Boolean = { + | @scalanative.unsafe.resolvedAtLinktime + | def vendor = "apple" + | + | os == "darwin" && vendor == "apple" + | } + | + | @scalanative.unsafe.resolvedAtLinktime + | def 
dynLibExt: String = + | if(isWindows) ".dll" + | else if(isMac) ".dylib" + | else ".so" + |} + |""".stripMargin, + "main.scala" -> """ + |import scala.scalanative.props._ + |object Main { + | def main(args: Array[String]): Unit = { + | println(dynLibExt) + | } + |}""".stripMargin + )("os" -> "darwin") { (_, result) => + val Props = Global.Top("scala.scalanative.props$") + def calculatedVal( + name: String, + ty: Type, + scope: Sig.Scope = Sig.Scope.Public + ) = { + val global = Props.member(Sig.Method(name, Seq(ty), scope)) + val mangled = scalanative.nir.Mangle(global) + result.resolvedVals.get(mangled) + } + result.resolvedVals("os") shouldEqual Val.String("darwin") + // nested method is defined as private + calculatedVal("vendor$1", Rt.String, Sig.Scope.Private(Props)) should + contain(Val.String("apple")) + calculatedVal("isWindows", Type.Bool) should contain(Val.False) + calculatedVal("isMac", Type.Bool) should contain(Val.True) + calculatedVal("dynLibExt", Rt.String) should contain(Val.String(".dylib")) + } + } + + it should "not allow to define linktime resolved vals in blocks" in { + val caught = intercept[scala.scalanative.api.CompilationFailedException] { + linkWithProps( + "props.scala" -> + """package scala.scalanative + |object props{ + | @scalanative.unsafe.resolvedAtLinktime + | def linktimeProperty = { + | val foo = 42 + | foo + | } + |}""".stripMargin, + "main.scala" -> + """import scala.scalanative.props._ + |object Main { + | def main(args: Array[String]): Unit = { + | if(linktimeProperty != 42) ??? 
+ | } + |}""".stripMargin + )() { (_, _) => () } + } + // Multiple errors + // caught.getMessage should contain("Linktime resolved block can only contain other linktime resolved def defintions") + } + private def shouldContainAll[T]( expected: Iterable[T], actual: Iterable[T] diff --git a/tools/src/test/scala/scala/scalanative/linker/ReachabilitySuite.scala b/tools/src/test/scala/scala/scalanative/linker/ReachabilitySuite.scala index df85b466f0..f28cd6e8e2 100644 --- a/tools/src/test/scala/scala/scalanative/linker/ReachabilitySuite.scala +++ b/tools/src/test/scala/scala/scalanative/linker/ReachabilitySuite.scala @@ -108,6 +108,9 @@ trait ReachabilitySuite extends AnyFunSuite { default .withWorkdir(outDir) .withClassPath(paths.toSeq) + .withCompilerConfig { + _.withTargetTriple("x86_64-unknown-unknown") + } .withMainClass(mainClass) } } diff --git a/unit-tests/native/src/test/scala/scala/scalanative/runtime/HeapSizeTest.scala.scala b/unit-tests/native/src/test/scala/scala/scalanative/runtime/HeapSizeTest.scala.scala new file mode 100644 index 0000000000..82a53a84ae --- /dev/null +++ b/unit-tests/native/src/test/scala/scala/scalanative/runtime/HeapSizeTest.scala.scala @@ -0,0 +1,31 @@ +package scala.scalanative.runtime +import org.junit.Test +import org.junit.Before +import org.junit.Assert._ +import scala.scalanative.unsafe.CSize +import scalanative.unsigned.{ULong, UnsignedRichInt} + +class HeapSizeTest { + + @Before + val conversionFactor = (1024 * 1024 * 1024).toULong + val lowerBound: ULong = 0.toULong + val higherBound: ULong = 32.toULong * conversionFactor + + @Test def checkInitHeapSize(): Unit = { + val initHeapSz = GC.getInitHeapSize() + assertTrue( + s"0 <= ${initHeapSz / conversionFactor}GB < 32GB", + initHeapSz >= lowerBound && initHeapSz < higherBound + ) + } + + @Test def checkMaxHeapSize(): Unit = { + val maxHeapSize = GC.getMaxHeapSize() + assertTrue( + s"0 < ${maxHeapSize / conversionFactor}GB <= 32GB", + maxHeapSize > lowerBound && maxHeapSize <= 
higherBound + ) + } + +} diff --git a/unit-tests/native/src/test/scala/scala/scalanative/unsafe/CVarArgListTest.scala b/unit-tests/native/src/test/scala/scala/scalanative/unsafe/CVarArgListTest.scala index 3aff8a49bb..3247da9886 100644 --- a/unit-tests/native/src/test/scala/scala/scalanative/unsafe/CVarArgListTest.scala +++ b/unit-tests/native/src/test/scala/scala/scalanative/unsafe/CVarArgListTest.scala @@ -19,7 +19,8 @@ class CVarArgListTest { val got = fromCString(buff) assertTrue(s"$got != $output", got == output) } - + @Test def empty(): Unit = + vatest(c"hello", Seq(), "hello") @Test def byteValue0(): Unit = vatest(c"%d", Seq(0.toByte), "0") @Test def byteValue1(): Unit = diff --git a/unit-tests/native/src/test/scala/scala/scalanative/unsafe/CVarArgTest.scala b/unit-tests/native/src/test/scala/scala/scalanative/unsafe/CVarArgTest.scala index b9b10f8280..4d18af094c 100644 --- a/unit-tests/native/src/test/scala/scala/scalanative/unsafe/CVarArgTest.scala +++ b/unit-tests/native/src/test/scala/scala/scalanative/unsafe/CVarArgTest.scala @@ -21,6 +21,8 @@ class CVarArgTest { assertEquals(got, output) } + @Test def empty(): Unit = + vatest(c"hello", "hello")(stdio.sprintf(_, _)) @Test def byteValue0(): Unit = vatest(c"%d", "0")(stdio.sprintf(_, _, 0.toByte)) @Test def byteValue1(): Unit = diff --git a/unit-tests/shared/src/test/require-jdk16/org/scalanative/testsuite/javalib/util/stream/DoubleStreamTestOnJDK16.scala b/unit-tests/shared/src/test/require-jdk16/org/scalanative/testsuite/javalib/util/stream/DoubleStreamTestOnJDK16.scala new file mode 100644 index 0000000000..2846d14884 --- /dev/null +++ b/unit-tests/shared/src/test/require-jdk16/org/scalanative/testsuite/javalib/util/stream/DoubleStreamTestOnJDK16.scala @@ -0,0 +1,76 @@ +package org.scalanative.testsuite.javalib.util.stream + +import java.{lang => jl} +import java.util.Arrays +import java.util.stream._ + +import org.junit.Test +import org.junit.Assert._ + +import 
org.scalanative.testsuite.utils.AssertThrows.assertThrows + +class DoubleStreamTestOnJDK16 { + + // Since: Java 16 + @Test def streamMapMulti_Eliding(): Unit = { + val initialCount = 6 + val expectedCount = 4 + + val data = new Array[Double](initialCount) + data(0) = 5.5 + data(1) = 4.4 + data(2) = -1.1 + data(3) = 0.0 + data(4) = -2.2 + data(5) = 3.3 + + val s = Arrays.stream(data) + + // By design, the mapper will return empty results for two items. + val mappedMulti = s.mapMulti((element, consumer) => + if ((element != 0.0) && (element != 4.4)) { + consumer.accept(element) + } + ) + + var count = mappedMulti.count() + + assertTrue("unexpected empty stream", count > 0) + assertEquals("unexpected number of elements", expectedCount, count) + } + + // Since: Java 16 + @Test def streamMapMulti_Expanding(): Unit = { + + case class Item(name: String, info: Double) + + val initialCount = 6 + val expectedCount = 7 + + val data = new Array[Double](initialCount) + data(0) = 5.5 + data(1) = 4.4 + data(2) = -1.1 + data(3) = 0.0 + data(4) = -2.2 + data(5) = 3.3 + + val s = Arrays.stream(data) + + // Expand one item with multiple replacements. Otherwise 1 to 1. 
+ val mappedMulti = s.mapMulti((element, consumer) => + if (element != 0.0) { + consumer.accept(element) + } else { + consumer.accept(jl.Double.NEGATIVE_INFINITY) + consumer.accept(jl.Double.POSITIVE_INFINITY) + } + ) + + var count = mappedMulti.count() + + assertTrue("unexpected empty stream", count > 0) + assertEquals("unexpected number of elements", expectedCount, count) + } + +} diff --git a/unit-tests/shared/src/test/require-jdk16/org/scalanative/testsuite/javalib/util/stream/StreamTestOnJDK16.scala b/unit-tests/shared/src/test/require-jdk16/org/scalanative/testsuite/javalib/util/stream/StreamTestOnJDK16.scala new file mode 100644 index 0000000000..52e710f88a --- /dev/null +++ b/unit-tests/shared/src/test/require-jdk16/org/scalanative/testsuite/javalib/util/stream/StreamTestOnJDK16.scala @@ -0,0 +1,188 @@ +package org.scalanative.testsuite.javalib.util.stream + +import java.util.Arrays +import java.util.function.Consumer +import java.util.stream._ + +import org.junit.Test +import org.junit.Assert._ +import org.junit.Ignore + +import org.scalanative.testsuite.utils.AssertThrows.assertThrows + +class StreamTestOnJDK16 { + + final val epsilon = 0.00001 // tolerance for Floating point comparisons. + + // Since: Java 16 + @Test def streamMapMulti_Eliding(): Unit = { + // By design, the mapper will return empty results for several items. + + val initialCount = 6 + val expectedCount = 3 + + val data = new Array[String](initialCount) + data(0) = "Hydrogen" + data(1) = "Helium" + data(2) = "" + data(3) = "Rabbit" + data(4) = "Beryllium" + data(5) = "Boron" + + val s = Arrays.stream(data) + + // Here the result type matches the element type. + // Next challenge, make the types differ. 
+ val mappedMulti = + s.mapMulti((element: String, consumer: Consumer[_ >: String]) => + if (element == "Rabbit") { + for (j <- 1 to 3) + consumer.accept(s"Rabbit_${j}") + } + ) + + var count = mappedMulti.count() + + assertTrue("unexpected empty stream", count > 0) + assertEquals("unexpected number of elements", expectedCount, count) + } + + // Since: Java 16 + @Test def streamMapMulti_DifferingTypes(): Unit = { + // Test the Java mapMulti() use case description. + // expand one input element to zero or multiple output elements. + + case class Item(name: String, upc: Int) + + val initialCount = 6 + val expectedCount = 2 + + val data = new Array[Item](initialCount) + data(0) = Item("Hydrogen", 1) + data(1) = Item("Helium", 2) + data(2) = Item("", 3) + data(3) = Item("Rabbit", 4) + data(4) = Item("Beryllium", 5) + data(5) = Item("Boron", 6) + + val s = Arrays.stream(data) + + // By design & intent, the element and result types differ. + val mappedMulti = + s.mapMulti((element: Item, consumer: Consumer[_ >: String]) => + if (element.upc == 6) { + for (j <- 1 to 2) + consumer.accept(s"${element.name}_${j}") + } + ) + + var count = mappedMulti.count() + + assertTrue("unexpected empty stream", count > 0) + assertEquals("unexpected number of elements", expectedCount, count) + } + + // Since: Java 16 + @Test def streamMapMultiToDouble(): Unit = { + case class Item(name: String, upc: Int) + + val phi = 1.61803 + val expectedSum = 87.37362 // sum of after-mapped values, not pre-mapped + + val initialCount = 6 + + val data = new Array[Item](initialCount) + data(0) = Item("Hydrogen", 1) + data(1) = Item("Helium", 2) + data(2) = Item("", 3) + data(3) = Item("Rabbit", 4) + data(4) = Item("Beryllium", 5) + data(5) = Item("Boron", 6) + + val s = Arrays.stream(data) + + // By design & intent, the element and result types differ. 
+ val mappedMultiToDouble = s.mapMultiToDouble((element, doubleConsumer) => + if (element.upc >= 3) { + for (j <- 1 to 2) // One way to increase your gold. + doubleConsumer.accept(j * element.upc * phi) + } + ) + + var sum = mappedMultiToDouble.sum() + + assertEquals("unexpected sum", expectedSum, sum, epsilon) + } + + // Since: Java 16 + @Test def streamToList_Empty(): Unit = { + val expectedCount = 0 + val data = new Array[Object](expectedCount) + + val s = Arrays.stream(data) + + val list = s.toList() + + val it = list.iterator() + assertFalse("unexpected non-empty list", it.hasNext()) + } + + // Since: Java 16 + @Test def streamToList_String(): Unit = { + val expectedCount = 7 + + val data = new Array[String](expectedCount) + data(0) = "The" + data(1) = "Difference" + data(2) = "Between" + data(3) = "me" + data(4) = "and" + data(5) = "a" + data(6) = "madman" + + val s = Arrays.stream(data) + + val list = s.toList() + + var count = 0 + + for (j <- 0 until data.size) { + assertEquals("mismatched element", data(j), list.get(j).toString()) + count += 1 + } + + assertTrue("unexpected empty list", count > 0) + assertEquals("unexpected number of elements", expectedCount, count) + } + + // Since: Java 16 + @Test def streamToList_ResultisUnmodifiable(): Unit = { + val expectedCount = 7 + + val data = new Array[String](expectedCount) + data(0) = "is" + data(1) = "that" + data(2) = "I" + data(3) = "am" + data(4) = "not" + data(5) = "mad" + data(6) = "!" 
+ + val s = Arrays.stream(data) + + val list = s.toList() + + // can read + val j = 3 + assertEquals("", data(j), list.get(j).toString()) + + // but not modify + assertThrows( + classOf[UnsupportedOperationException], + list.set(6, "melted clock") + ) + + assertThrows(classOf[UnsupportedOperationException], list.remove(6)) + } + +} diff --git a/unit-tests/shared/src/test/require-jdk9/org/scalanative/testsuite/javalib/util/stream/DoubleStreamTestOnJDK9.scala b/unit-tests/shared/src/test/require-jdk9/org/scalanative/testsuite/javalib/util/stream/DoubleStreamTestOnJDK9.scala new file mode 100644 index 0000000000..0e12cff04b --- /dev/null +++ b/unit-tests/shared/src/test/require-jdk9/org/scalanative/testsuite/javalib/util/stream/DoubleStreamTestOnJDK9.scala @@ -0,0 +1,101 @@ +package org.scalanative.testsuite.javalib.util.stream + +import java.util.stream._ + +import org.junit.Test +import org.junit.Assert._ + +class DoubleStreamTestOnJDK9 { + + final val epsilon = 0.00001 // tolerance for Floating point comparisons. 
+ + @Test def streamDropWhile_Empty(): Unit = { + val s = DoubleStream.empty() + + val remaining = s.dropWhile(_ < 0.0) + + assertFalse("stream should be empty", remaining.findFirst().isPresent) + } + + @Test def streamDropWhile_NoMatch(): Unit = { + val expectedRemainingCount = 6 + + val s = DoubleStream.of(1.1, 2.2, 4.4, 0.1, -0.1, 0.2) + + val remaining = s.dropWhile(_ > 10.0) + + assertEquals( + "unexpected remaining count", + expectedRemainingCount, + remaining.count() + ) + } + + @Test def streamDropWhile_SomeMatch(): Unit = { + val expectedRemainingCount = 4 + + val s = DoubleStream.of(1.1, 2.2, 4.4, 0.1, -0.1, 0.2) + + val remaining = s.dropWhile(_ < 3.0) + + assertEquals( + "unexpected remaining count", + expectedRemainingCount, + remaining.count() + ) + } + + @Test def streamIterate_BoundedByPredicate(): Unit = { + var count = -1 + val limit = 5 + + val expectedSeed = 2.71828 + + val s = DoubleStream.iterate( + expectedSeed, + e => count < limit, + e => { + count += 1 + e + 1.0 + } + ) + + val it = s.iterator() + + assertTrue("stream should not be empty", it.hasNext()) + + assertEquals(s"seed", expectedSeed, it.nextDouble(), epsilon) + + for (j <- 1 to limit) { + assertEquals(s"element: ${j}", expectedSeed + j, it.nextDouble(), epsilon) + } + + assertFalse("stream should be empty", it.hasNext()) + } + + @Test def streamTakeWhile_Empty(): Unit = { + val s = DoubleStream.empty() + + val taken = s.takeWhile(_ < 5.23) + + assertFalse("stream should be empty", taken.findFirst().isPresent) + } + + @Test def streamTakeWhile_NoMatch(): Unit = { + val s = DoubleStream.of(1.1, 2.2, 4.4, 0.1, -0.1, 0.2) + + val taken = s.takeWhile(_ > 10.10) + assertFalse("stream should be empty", taken.findFirst().isPresent) + } + + @Test def streamTakeWhile_SomeMatch(): Unit = { + val expectedTakenCount = 3 + + val s = DoubleStream.of(1.1, 2.2, 4.4, 0.1, -0.1, 0.2) + + val taken = s.takeWhile(_ > 0.5) + + assertEquals("unexpected taken count", expectedTakenCount, taken.count()) + 
} + +} diff --git a/unit-tests/shared/src/test/require-jdk9/org/scalanative/testsuite/javalib/util/stream/StreamTestOnJDK9.scala b/unit-tests/shared/src/test/require-jdk9/org/scalanative/testsuite/javalib/util/stream/StreamTestOnJDK9.scala new file mode 100644 index 0000000000..4a425a1ac9 --- /dev/null +++ b/unit-tests/shared/src/test/require-jdk9/org/scalanative/testsuite/javalib/util/stream/StreamTestOnJDK9.scala @@ -0,0 +1,157 @@ +package org.scalanative.testsuite.javalib.util.stream + +import java.util.stream._ + +import org.junit.Test +import org.junit.Assert._ + +class StreamTestOnJDK9 { + + final val no = false + final val yes = true + + case class Patron(hasTicket: Boolean, isRowdy: Boolean) + + @Test def streamDropWhile_Empty(): Unit = { + val s = Stream.empty[Patron]() + + val remaining = s.dropWhile((e) => e.hasTicket) + + assertFalse("stream should be empty", remaining.findFirst().isPresent) + } + + @Test def streamDropWhile_NoMatch(): Unit = { + val expectedRemainingCount = 4 + + val s = Stream.of( + Patron(hasTicket = no, isRowdy = no), + Patron(hasTicket = yes, isRowdy = no), + Patron(hasTicket = no, isRowdy = no), + Patron(hasTicket = yes, isRowdy = no) + ) + + val remaining = s.dropWhile((e) => e.isRowdy) + + assertEquals( + "unexpected remaining count", + expectedRemainingCount, + remaining.count() + ) + } + + @Test def streamDropWhile_SomeMatch(): Unit = { + val expectedRemainingCount = 2 + + val s = Stream.of( + Patron(hasTicket = no, isRowdy = yes), + Patron(hasTicket = yes, isRowdy = yes), + Patron(hasTicket = no, isRowdy = no), + Patron(hasTicket = yes, isRowdy = yes) + ) + + val remaining = s.dropWhile((e) => e.isRowdy) + + assertEquals( + "unexpected remaining count", + expectedRemainingCount, + remaining.count() + ) + } + + @Test def streamIterate_BoundedByPredicate(): Unit = { + var count = -1 + val limit = 5 + + // Use old style predicate rather than lambda to keep Scala 2.12 happy. 
+ val predicate = new java.util.function.Predicate[String] { + def test(str: String): Boolean = count < limit + } + + val expectedSeed = "Red bellied woodpecker" + val s = Stream.iterate[String]( + expectedSeed, + predicate, + (e: String) => { // Specify parameter type to keep keep Scala 2.12 happy. + count += 1 + count.toString() + } + ) + + val it = s.iterator() + + assertTrue("stream should not be empty", it.hasNext()) + + assertEquals(s"seed", expectedSeed, it.next()) + + for (j <- 0 until limit) + assertEquals(s"element: ${j}", String.valueOf(j), it.next()) + + assertFalse("stream should be empty", it.hasNext()) + } + + @Test def streamOfNullable_Empty(): Unit = { + val s = Stream.ofNullable[String](null) + val it = s.iterator() + + assertFalse("stream should be empty", it.hasNext()) + } + + @Test def streamOfNullable_Singleton(): Unit = { + val expected = "Frodo" + val s = Stream.ofNullable[String](expected) + + val it = s.iterator() + + assertTrue("stream should not be empty", it.hasNext()) + assertEquals(s"singleton", expected, it.next()) + } + + @Test def streamOf_TypeDispatch(): Unit = { + val expected = "Frodo" + val s = Stream.ofNullable[String](expected) + + val it = s.iterator() + + assertTrue("stream should not be empty", it.hasNext()) + assertEquals(s"singleton", expected, it.next()) + } + + @Test def streamTakeWhile_Empty(): Unit = { + val s = Stream.empty[Patron]() + + val taken = s.takeWhile((e) => e.hasTicket) + + assertFalse("stream should be empty", taken.findFirst().isPresent) + } + + @Test def streamTakeWhile_NoMatch(): Unit = { + val s = Stream.of( + Patron(hasTicket = no, isRowdy = no), + Patron(hasTicket = yes, isRowdy = no), + Patron(hasTicket = no, isRowdy = yes), + Patron(hasTicket = yes, isRowdy = no) + ) + + val taken = s.takeWhile((e) => e.hasTicket) + assertFalse("stream should be empty", taken.findFirst().isPresent) + } + + @Test def streamTakeWhile_SomeMatch(): Unit = { + val expectedTakenCount = 3 + + val s = Stream.of( + 
Patron(hasTicket = yes, isRowdy = no), + Patron(hasTicket = yes, isRowdy = yes), + Patron(hasTicket = yes, isRowdy = no), + Patron(hasTicket = no, isRowdy = no), + Patron(hasTicket = yes, isRowdy = no), + Patron(hasTicket = no, isRowdy = yes), + Patron(hasTicket = yes, isRowdy = no) + ) + + val taken = s.takeWhile((e) => e.hasTicket) + + assertEquals("unexpected taken count", expectedTakenCount, taken.count()) + } + +} diff --git a/unit-tests/shared/src/test/require-scala3-jdk10/org/scalanative/testsuite/javalib/util/stream/CollectorsTestOnJDK10.scala b/unit-tests/shared/src/test/require-scala3-jdk10/org/scalanative/testsuite/javalib/util/stream/CollectorsTestOnJDK10.scala new file mode 100644 index 0000000000..9f5e5f5920 --- /dev/null +++ b/unit-tests/shared/src/test/require-scala3-jdk10/org/scalanative/testsuite/javalib/util/stream/CollectorsTestOnJDK10.scala @@ -0,0 +1,202 @@ +package org.scalanative.testsuite.javalib.util.stream + +import java.util.stream._ + +import java.{util => ju} +import java.util.ArrayList + +import java.util.stream.Collector.Characteristics + +import org.junit.Test +import org.junit.Assert._ + +import org.scalanative.testsuite.utils.AssertThrows.assertThrows + +class CollectorsTestOnJDK10 { + + private def requireEmptyCharacteristics( + differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertEquals( + s"unexpected extra characteristics: ${differentia}", + 0, + differentia.size() + ) + } + + private def requireUnorderedCharacteristicOnly( + differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertEquals("characteristics set size", 1, differentia.size()) + + assertTrue( + "Characteristics.UNORDERED is missing", + differentia.contains(Characteristics.UNORDERED) + ) + } + // Since: Java 10 + @Test def collectorsToUnmodifiableList(): Unit = { + val nElements = 7 + val sisters = new ArrayList[String](nElements) + sisters.add("Maya") + sisters.add("Electra") + sisters.add("Taygete") + sisters.add("Alcyone") + 
sisters.add("Celaeno") + sisters.add("Sterope") + sisters.add("Merope") + + val s = sisters.stream() + + val collector = Collectors.toUnmodifiableList[String]() + + requireEmptyCharacteristics(collector.characteristics()) + + val collected = s.collect(collector) + + assertEquals("list size", nElements, collected.size()) + + // Unmodifiable + assertThrows(classOf[UnsupportedOperationException], collected.remove(0)) + + // Proper elements, in encounter order + for (j <- 0 until nElements) + assertEquals("list element", sisters.get(j), collected.get(j)) + } + + // Since: Java 10 + @Test def collectorsToUnmodifiableMap_2Arg(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val nElements = 7 + + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Maya", 0)) + employees.add(Employee("Electra", 1)) + employees.add(Employee("Taygete", 2)) + employees.add(Employee("Alcyone", 3)) + employees.add(Employee("Celaeno", 4)) + employees.add(Employee("Sterope", 5)) + employees.add(Employee("Merope", 6)) + + val s = employees.stream() + + val collector = + Collectors.toUnmodifiableMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val map = s.collect(collector) + + assertEquals("count", nElements, map.size()) + // Unmodifiable + assertThrows(classOf[UnsupportedOperationException], map.remove(0)) + + map.forEach((k: String, v: Int) => + assertEquals( + s"contents: key: '${k}' value: ${v}", + employees.get(v).badgeNumber, + v + ) + ) + } + + // Since: Java 10 + @Test def collectorsToUnmodifiableMap_3Arg(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val nElements = 7 + + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Maya", 0)) + employees.add(Employee("Electra", 1)) + employees.add(Employee("Taygete", 2)) + employees.add(Employee("Alcyone", 3)) + employees.add(Employee("Merope", -6)) + 
employees.add(Employee("Sterope", 5)) + employees.add(Employee("Merope", 6)) + + // One entry, "Merope", will be merged. + val expectedCount = nElements - 1 + + val expectedReplacement = -36 + + val s = employees.stream() + + val collector = + Collectors.toUnmodifiableMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber, + (found1: Int, found2: Int) => found1 * found2 + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val map = s.collect(collector) + + assertEquals("count", expectedCount, map.size()) + + // Unmodifiable + assertThrows(classOf[UnsupportedOperationException], map.remove(0)) + + map.forEach((k: String, v: Int) => + k match { + case k if (k == "Merope") => + assertEquals( + s"contents: key: '${k}' value: ${v}", + expectedReplacement, + v + ) + + case _ => + assertEquals( + s"contents: key: '${k}' value: ${v}", + employees.get(v).badgeNumber, + v + ) + } + ) + } + + // Since: Java 10 + @Test def collectorsToUnmodifiableSet(): Unit = { + val nElements = 7 + val sisters = new ArrayList[String](nElements) + sisters.add("Maya") + sisters.add("Electra") + sisters.add("Taygete") + sisters.add("Alcyone") + sisters.add("Celaeno") + sisters.add("Sterope") + sisters.add("Merope") + + val s = sisters.stream() + + val collector = Collectors.toUnmodifiableSet[String]() + + requireUnorderedCharacteristicOnly(collector.characteristics()) + + val collected = s.collect(collector) + + assertEquals("set size", nElements, collected.size()) + + // Unmodifiable + assertThrows( + classOf[UnsupportedOperationException], + collected.remove(sisters.get(0)) + ) + + // Proper elements + for (j <- 0 until nElements) { + val expected = sisters.get(j) + assertTrue( + "set element not in Set: ${expected}", + collected.contains(expected) + ) + } + } + +} diff --git a/unit-tests/shared/src/test/require-scala3-jdk12/org/scalanative/testsuite/javalib/util/stream/CollectorsTestOnJDK12.scala 
b/unit-tests/shared/src/test/require-scala3-jdk12/org/scalanative/testsuite/javalib/util/stream/CollectorsTestOnJDK12.scala new file mode 100644 index 0000000000..6b48c20b98 --- /dev/null +++ b/unit-tests/shared/src/test/require-scala3-jdk12/org/scalanative/testsuite/javalib/util/stream/CollectorsTestOnJDK12.scala @@ -0,0 +1,93 @@ +package org.scalanative.testsuite.javalib.util.stream + +import java.util.stream._ + +import java.{util => ju} +import java.util.ArrayList +import java.util.Arrays + +import org.junit.Test +import org.junit.Assert._ + +class CollectorsTestOnJDK12 { + + private def requireEmptyCharacteristics( + differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertEquals( + s"unexpected extra characteristics: ${differentia}", + 0, + differentia.size() + ) + } + + // Since: Java 12 + @Test def collectorsTeeing(): Unit = { + /* This implements one of the examples in the Java 19 description of the + * java.util.Collectors class: + * // Group employees by department + */ + + case class Employee(name: String, department: String) + + val nElements = 16 + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Employee_1", "OGO")) + employees.add(Employee("Employee_2", "TAY")) + employees.add(Employee("Employee_3", "LKG")) + employees.add(Employee("Employee_4", "ZKO")) + employees.add(Employee("Employee_5", "OGO")) + employees.add(Employee("Employee_6", "LKG")) + employees.add(Employee("Employee_7", "LKG")) + employees.add(Employee("Employee_8", "ZKO")) + employees.add(Employee("Employee_9", "ZKO")) + employees.add(Employee("Employee_10", "TAY")) + employees.add(Employee("Employee_11", "LKG")) + employees.add(Employee("Employee_12", "ZKO")) + employees.add(Employee("Employee_13", "OGO")) + employees.add(Employee("Employee_14", "ZKO")) + employees.add(Employee("Employee_15", "LKG")) + employees.add(Employee("Employee_16", "ZKO")) + + val s = employees.stream() + + val collector0 = + Collectors.teeing( + Collectors.counting(), 
+ Collectors.filtering( + (e: Employee) => e.department == "LKG", + Collectors.counting() + ), + (r1, r2) => Arrays.asList(r1, r2) + ) + + /* The characteristics required of teeing() depends upon the + * characteristics of the two downstreams. Here, both are simple + * so expect zero characteristics. + * + * The tests for teeing() should be expanded to cover all four + * combinations of characteristics: None, CONCURRENT-only, + * UNORDERED-only, both CONCURRENT and UNORDERED. + */ + + requireEmptyCharacteristics(collector0.characteristics()) + + val teed = + s.collect( + Collectors.teeing( + Collectors.counting(), + Collectors.filtering( + (e: Employee) => e.department == "LKG", + Collectors.counting() + ), + (r1, r2) => Arrays.asList(r1, r2) + ) + ) + + assertEquals("teed size", 2, teed.size()) + + assertEquals("total employees", nElements.toLong, teed.get(0)) + assertEquals("LKG employees", 5L, teed.get(1)) + } + +} diff --git a/unit-tests/shared/src/test/require-scala3-jdk9/org/scalanative/testsuite/javalib/util/stream/CollectorsTestOnJDK9.scala b/unit-tests/shared/src/test/require-scala3-jdk9/org/scalanative/testsuite/javalib/util/stream/CollectorsTestOnJDK9.scala new file mode 100644 index 0000000000..14f5d50fa6 --- /dev/null +++ b/unit-tests/shared/src/test/require-scala3-jdk9/org/scalanative/testsuite/javalib/util/stream/CollectorsTestOnJDK9.scala @@ -0,0 +1,154 @@ +package org.scalanative.testsuite.javalib.util.stream + +import java.util.stream._ + +import java.{util => ju} +import java.util.ArrayList +import java.util.Map + +import java.util.stream.Collector.Characteristics + +import org.junit.Test +import org.junit.Assert._ + +class CollectorsTestOnJDK9 { + + private def requireEmptyCharacteristics( + differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertEquals( + s"unexpected extra characteristics: ${differentia}", + 0, + differentia.size() + ) + } + + private def requireAll3Characteristics( + differentia: ju.Set[Collector.Characteristics] 
+ ): Unit = { + assertTrue( + "Characteristics.CONCURRENT is missing", + differentia.contains(Characteristics.CONCURRENT) + ) + + assertTrue( + "Characteristics.UNORDERED is missing", + differentia.contains(Characteristics.UNORDERED) + ) + + assertTrue( + "Characteristics.IDENTITY_FINISH is missing", + differentia.contains(Characteristics.IDENTITY_FINISH) + ) + } + + // Since: Java 9 + @Test def collectorsFiltering(): Unit = { + val nElements = 100 + val nEvenElements = nElements / 2 + + // K. F. Gauss formula for sum of even integers within a range. + val expectedFilteredSum = ((2 + 100) / 2) * nEvenElements + + val s = Stream + .iterate[Int](1, e => e + 1) + .limit(nElements) + + val collector = + Collectors.filtering( + (e: Int) => (e % 2 == 0), + Collectors.summingInt((e: Int) => e) + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val sumOfEvens = s.collect(collector) + + assertEquals("unexpected filteredSum", expectedFilteredSum, sumOfEvens) + } + + @Test def collectorsFiltering_PreservesCharacteristics(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val collector1 = + Collectors.toConcurrentMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber + ) + + requireAll3Characteristics(collector1.characteristics()) + + // Pick a downstream that is now known to have characteristics. + val collector2 = + Collectors.filtering( + (e: Map.Entry[String, Int]) => (e.getValue() <= 3), + collector1 + ) + + // Are the downstreamCharacteristics inherited correctly? JVM does that. 
+ requireAll3Characteristics(collector2.characteristics()) + } + + // Since: Java 9 + @Test def collectorsFlatMapping(): Unit = { + /* This implements one of the examples in the Java 19 description of the + * java.util.Collectors class: + * // Accumulate names into a List + */ + val nElements = 7 + val sisters = new ArrayList[String](nElements) + sisters.add("Maya") + sisters.add("Electra") + sisters.add("Taygete") + sisters.add("Alcyone") + sisters.add("Celaeno") + sisters.add("Sterope") + sisters.add("Merope") + + val expectedSum = 45 * 2 + + val s = sisters.stream() + + // A demo transformation just for the fun of it. + val collector = Collectors.flatMapping( + (e: String) => { + val n = e.length() + Stream.of(n, n) + }, + Collectors.summingInt((e: Int) => e) + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val sum = s.collect(collector) + + assertEquals("sum", expectedSum, sum) + } + + @Test def collectorsFlatMapping_PreservesCharacteristics(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val collector1 = + Collectors.toConcurrentMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber + ) + + requireAll3Characteristics(collector1.characteristics()) + + // Pick a downstream that is now known to have characteristics. + val collector2 = + Collectors.flatMapping( + (e: Map.Entry[String, Int]) => + Stream.of( + Employee(e.getKey(), e.getValue()), + Employee(e.getValue().toString(), e.getValue() * 2) // nonesense + ), + collector1 + ) + + // Are the downstreamCharacteristics inherited correctly? JVM does that. 
+ requireAll3Characteristics(collector2.characteristics()) + } + +} diff --git a/unit-tests/shared/src/test/scala-3/scala/org/scalanative/testsuite/javalib/util/stream/CollectorsTest.scala b/unit-tests/shared/src/test/scala-3/scala/org/scalanative/testsuite/javalib/util/stream/CollectorsTest.scala new file mode 100644 index 0000000000..692760e9f4 --- /dev/null +++ b/unit-tests/shared/src/test/scala-3/scala/org/scalanative/testsuite/javalib/util/stream/CollectorsTest.scala @@ -0,0 +1,1578 @@ +package org.scalanative.testsuite.javalib.util.stream + +import java.{util => ju} +import java.util._ + +import java.util.concurrent.ConcurrentMap +import java.util.concurrent.ConcurrentHashMap + +import java.util.function.Function +import java.util.function.BinaryOperator + +import java.util.stream._ +import java.util.stream.Collector.Characteristics + +import org.junit.Test +import org.junit.Assert._ + +import org.scalanative.testsuite.utils.AssertThrows.assertThrows + +/* This Test suite depends upon a competent Stream implementation. + * This file focuses on exercising the Collectors. + * Similar, overlapping, or identical Tests in StreamTest focus on exercising + * Streams. + */ + +/* Design Notes: + * + * 1) This file is restricted to running on Scala 3. + * + * It is, by explicit purpose, written to call Collectors + * as they are most likely to be used in the field: using brief, + * intentional, lambdas and few/no unnecessary type arguments. + * + * As such, they provide reasonable, not perfect, models for how + * Collectors may be used with relative ease. + * + * A person with too much time on their hands could write alternate + * code for Scala 2.13.*. Such has been done during development. + * It can be made to work but changes for the more interesting and + * complex uses of Collectors are just too ugly to publish as a use model. + * + * A person with entirely too much time on their hands could try to + * write alternate code for Scala 2.12.*. 
+ * The changes required for Scala 2.12 are extensive and unlikely to + * repay the cost of making them. + * + * 2) Someday, after the correctness of both the underlying implementation + * and the Tests themselves has been shown, replication of various + * data structures and code paths may be collapsed to common code. + * + * Rough edges, partial list + * - Testing for the presence or absence of Characteristics is a good + * candidate for re-work. + * + * - The various variants & initializations of Employees classes + * should be checked for commonalities and possible consolidation. + */ + +class CollectorsTest { + + final val epsilon = 0.00001 // tolerance for Floating point comparisons. + + case class Student(name: String, grade: Int, salary: Double) + + private def createStdStudentList(): ArrayList[Student] = { + val nElements = 8 + val students = new ArrayList[Student](nElements) + students.add(Student("Student_1", 99, 87.03)) + students.add(Student("Student_2", 0, 16.18)) + students.add(Student("Student_3", 96, 91.94)) + students.add(Student("Student_4", 80, 35.12)) + students.add(Student("Student_5", 81, 7.75)) + students.add(Student("Student_6", 88, 63.69)) + students.add(Student("Student_7", 90, 79.19)) + students.add(Student("Student_8", 70, 49.15)) + + students + } + + case class UpcItem(name: String, upc: Int) + case class ValueItem(doubleValue: Double, longValue: Long, intValue: Int) + + private def requireEmptyCharacteristics( + differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertEquals( + s"unexpected extra characteristics: ${differentia}", + 0, + differentia.size() + ) + } + + private def requireIdentityCharacteristicOnly( + differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertEquals("characteristics set size", 1, differentia.size()) + + assertTrue( + "Characteristics.IDENTITY_FINISH is missing", + differentia.contains(Characteristics.IDENTITY_FINISH) + ) + } + + private def requireUnorderedCharacteristicOnly( + 
differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertEquals("characteristics set size", 1, differentia.size()) + + assertTrue( + "Characteristics.UNORDERED is missing", + differentia.contains(Characteristics.UNORDERED) + ) + } + + private def requireConcurrentUnorderedCharacteristicsOnly( + differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertEquals("characteristics set size", 2, differentia.size()) + + assertTrue( + "Characteristics.CONCURRENT is missing", + differentia.contains(Characteristics.CONCURRENT) + ) + + assertTrue( + "Characteristics.UNORDERED is missing", + differentia.contains(Characteristics.UNORDERED) + ) + } + + private def requireIdentityUnorderedCharacteristicOnly( + differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertEquals("characteristics set size", 2, differentia.size()) + + assertTrue( + "Characteristics.IDENTITY_FINISH is missing", + differentia.contains(Characteristics.IDENTITY_FINISH) + ) + + assertTrue( + "Characteristics.UNORDERED is missing", + differentia.contains(Characteristics.UNORDERED) + ) + } + + private def requireAll3Characteristics( + differentia: ju.Set[Collector.Characteristics] + ): Unit = { + assertTrue( + "Characteristics.CONCURRENT is missing", + differentia.contains(Characteristics.CONCURRENT) + ) + + assertTrue( + "Characteristics.UNORDERED is missing", + differentia.contains(Characteristics.UNORDERED) + ) + + assertTrue( + "Characteristics.IDENTITY_FINISH is missing", + differentia.contains(Characteristics.IDENTITY_FINISH) + ) + } + + @Test def collectorsAveragingDouble(): Unit = { + + val expectedAverage = 3.30 + + val nElements = 7 + val items = new ArrayList[ValueItem](nElements) + items.add(ValueItem(3.3, 3L, 3)) + items.add(ValueItem(2.2, 2L, 2)) + items.add(ValueItem(1.1, 1L, 1)) + items.add(ValueItem(4.4, 4L, 4)) + items.add(ValueItem(0.0, 0L, 0)) + items.add(ValueItem(6.6, 6L, 6)) + items.add(ValueItem(5.5, 5L, 5)) + + val s = items.stream() + + val collector = 
+ Collectors.averagingDouble((e: ValueItem) => e.doubleValue) + + requireEmptyCharacteristics(collector.characteristics()) + + val average = s.collect(collector) + + assertEquals("average", expectedAverage, average, epsilon) + } + + @Test def collectorsAveragingInt(): Unit = { + + val expectedAverage = 46.0 + + val nElements = 7 + val items = new ArrayList[ValueItem](nElements) + items.add(ValueItem(3.3, 3L, 29)) + items.add(ValueItem(2.2, 2L, 66)) + items.add(ValueItem(1.1, 1L, 54)) + items.add(ValueItem(4.4, 4L, 15)) + items.add(ValueItem(0.0, 0L, 63)) + items.add(ValueItem(6.6, 6L, 82)) + items.add(ValueItem(5.5, 5L, 13)) + + val s = items.stream() + + val collector = + Collectors.averagingInt((e: ValueItem) => e.intValue) + + requireEmptyCharacteristics(collector.characteristics()) + + val average = s.collect(collector) + + assertEquals("average", expectedAverage, average, epsilon) + } + + @Test def collectorsAveragingLong(): Unit = { + + val expectedAverage = 50.4285714 + + val nElements = 7 + val items = new ArrayList[ValueItem](nElements) + items.add(ValueItem(3.3, 36L, 29)) + items.add(ValueItem(2.2, 32L, 66)) + items.add(ValueItem(1.1, 100L, 54)) + items.add(ValueItem(4.4, 84L, 15)) + items.add(ValueItem(0.0, 22L, 63)) + items.add(ValueItem(6.6, 45L, 82)) + items.add(ValueItem(5.5, 34L, 13)) + + val s = items.stream() + + val collector = + Collectors.averagingLong((e: ValueItem) => e.longValue) + + requireEmptyCharacteristics(collector.characteristics()) + + val average = s.collect(collector) + + assertEquals("average", expectedAverage, average, epsilon) + } + + @Test def collectorsCollectingAndThen(): Unit = { + val nElements = 20 + val nEvenElements = nElements / 2 + + // K. F. Gauss formula for sum of even integers within a range. 
+ val sum = ((2 + 20) / 2) * nEvenElements + val expectedSumSquared = sum * sum + + val s = Stream + .iterate[Int](1, e => e + 1) + .limit(nElements) + + val collector = + Collectors.collectingAndThen( + Collectors.toList(), + (e: ju.List[Int]) => Collections.unmodifiableList(e) + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val adamantine = s.collect(collector) + + assertEquals("list size", nElements, adamantine.size()) + + // Unmodifiable + assertThrows(classOf[UnsupportedOperationException], adamantine.remove(0)) + } + + @Test def collectorsCounting(): Unit = { + val nElements = 29 + + val s = Stream + .iterate[Int](1775, e => e + 1) + .limit(nElements) + + val collector = Collectors.counting[Int]() + + requireEmptyCharacteristics(collector.characteristics()) + + val count = s.collect(collector) + + assertEquals("unexpected count", nElements.toLong, count) + } + + @Test def collectorsGroupingBy_1Arg(): Unit = { + /* This implements one of the examples in the Java 19 description of the + * java.util.Collectors class: + * // Group employees by department + */ + + case class Employee(name: String, department: String) + + val nElements = 16 + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Employee_1", "OGO")) + employees.add(Employee("Employee_2", "TAY")) + employees.add(Employee("Employee_3", "LKG")) + employees.add(Employee("Employee_4", "ZKO")) + employees.add(Employee("Employee_5", "OGO")) + employees.add(Employee("Employee_6", "LKG")) + employees.add(Employee("Employee_7", "LKG")) + employees.add(Employee("Employee_8", "ZKO")) + employees.add(Employee("Employee_9", "ZKO")) + employees.add(Employee("Employee_10", "TAY")) + employees.add(Employee("Employee_11", "LKG")) + employees.add(Employee("Employee_12", "ZKO")) + employees.add(Employee("Employee_13", "OGO")) + employees.add(Employee("Employee_14", "ZKO")) + employees.add(Employee("Employee_15", "LKG")) + employees.add(Employee("Employee_16", "ZKO")) + + val 
s = employees.stream() + + val collector = + Collectors.groupingBy((e: Employee) => e.department) + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val grouped = s.collect(collector) + + assertEquals("grouped size", 4, grouped.size()) + + val ogoEmployees = grouped.get("OGO") + assertEquals("grouped ogo size", 3, ogoEmployees.size()) + + val tayEmployees = grouped.get("TAY") + assertEquals("grouped tay size", 2, tayEmployees.size()) + + val lkgEmployees = grouped.get("LKG") + assertEquals("grouped lkg size", 5, lkgEmployees.size()) + + val zkoEmployees = grouped.get("ZKO") + assertEquals("grouped zko size", 6, zkoEmployees.size()) + + employees.forEach(e => + e.department match { + case "OGO" => + assertTrue( + s"missing OGO employee: ${e.name}", + grouped.get("OGO").contains(e) + ) + + case "TAY" => + assertTrue( + s"missing TAY employee: ${e.name}", + grouped.get("TAY").contains(e) + ) + + case "LKG" => + assertTrue( + s"missing LKG employee: ${e.name}", + grouped.get("LKG").contains(e) + ) + + case "ZKO" => + assertTrue( + s"missing ZKO employee: ${e.name}", + grouped.get("ZKO").contains(e) + ) + } + ) + } + + @Test def collectorsGroupingBy_2Arg(): Unit = { + /* This implements one of the examples in the Java 19 description of the + * java.util.Collectors class: + * // Compute sum of salaries by department + */ + + case class Employee(name: String, department: String, salary: Int) + + val nElements = 16 + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Employee_1", "OGO", 1606)) + employees.add(Employee("Employee_2", "TAY", 1505)) + employees.add(Employee("Employee_3", "LKG", 1404)) + employees.add(Employee("Employee_4", "ZKO", 1303)) + employees.add(Employee("Employee_5", "OGO", 1202)) + employees.add(Employee("Employee_6", "LKG", 1101)) + employees.add(Employee("Employee_7", "LKG", 1000)) + employees.add(Employee("Employee_8", "ZKO", 909)) + employees.add(Employee("Employee_9", "ZKO", 808)) + 
employees.add(Employee("Employee_10", "TAY", 707)) + employees.add(Employee("Employee_11", "LKG", 606)) + employees.add(Employee("Employee_12", "ZKO", 505)) + employees.add(Employee("Employee_13", "OGO", 404)) + employees.add(Employee("Employee_14", "ZKO", 303)) + employees.add(Employee("Employee_15", "LKG", 202)) + employees.add(Employee("Employee_16", "ZKO", 101)) + + val s = employees.stream() + + val collector = + Collectors.groupingBy( + (e: Employee) => e.department, + Collectors.summingInt((e: Employee) => e.salary) + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val grouped = s.collect(collector) + + assertEquals("grouped size", 4, grouped.size()) + + val ogoEmployees = grouped.get("OGO") + assertEquals("ogo salary", 3212, ogoEmployees) + + val tayEmployees = grouped.get("TAY") + assertEquals("tay salary", 2212, tayEmployees) + + val lkgEmployees = grouped.get("LKG") + assertEquals("lkg salary", 4313, lkgEmployees) + + val zkoEmployees = grouped.get("ZKO") + assertEquals("zko salary", 3929, zkoEmployees) + } + + @Test def collectorsGroupingBy_3Arg(): Unit = { + /* This implements one of the examples in the Java 19 description of the + * java.util.Collectors class (using groupingBy with 3 arguments): + * // Compute sum of salaries by department + */ + + case class Employee(name: String, department: String, salary: Int) + + val nElements = 16 + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Employee_1", "OGO", 1606)) + employees.add(Employee("Employee_2", "TAY", 1505)) + employees.add(Employee("Employee_3", "LKG", 1404)) + employees.add(Employee("Employee_4", "ZKO", 1303)) + employees.add(Employee("Employee_5", "OGO", 1202)) + employees.add(Employee("Employee_6", "LKG", 1101)) + employees.add(Employee("Employee_7", "LKG", 1000)) + employees.add(Employee("Employee_8", "ZKO", 909)) + employees.add(Employee("Employee_9", "ZKO", 808)) + employees.add(Employee("Employee_10", "TAY", 707)) + 
employees.add(Employee("Employee_11", "LKG", 606)) + employees.add(Employee("Employee_12", "ZKO", 505)) + employees.add(Employee("Employee_13", "OGO", 404)) + employees.add(Employee("Employee_14", "ZKO", 303)) + employees.add(Employee("Employee_15", "LKG", 202)) + employees.add(Employee("Employee_16", "ZKO", 101)) + + val s = employees.stream() + + // Note Well: + // Collectors.summingInt() returns an Integer, not a primitive Int. + + val collector = + Collectors.groupingBy( + (e: Employee) => e.department, + () => new TreeMap[String, Integer], + Collectors.summingInt((e: Employee) => e.salary) + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val grouped = s.collect(collector) + + assertEquals("grouped size", 4, grouped.size()) + + val ogoEmployees = grouped.get("OGO") + assertEquals("ogo salary", 3212, ogoEmployees) + + val tayEmployees = grouped.get("TAY") + assertEquals("tay salary", 2212, tayEmployees) + + val lkgEmployees = grouped.get("LKG") + assertEquals("lkg salary", 4313, lkgEmployees) + + val zkoEmployees = grouped.get("ZKO") + assertEquals("zko salary", 3929, zkoEmployees) + } + + @Test def collectorsGroupingByConcurrent_1Arg(): Unit = { + case class Employee(name: String, department: String) + + val nElements = 16 + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Employee_1", "OGO")) + employees.add(Employee("Employee_2", "TAY")) + employees.add(Employee("Employee_3", "LKG")) + employees.add(Employee("Employee_4", "ZKO")) + employees.add(Employee("Employee_5", "OGO")) + employees.add(Employee("Employee_6", "LKG")) + employees.add(Employee("Employee_7", "LKG")) + employees.add(Employee("Employee_8", "ZKO")) + employees.add(Employee("Employee_9", "ZKO")) + employees.add(Employee("Employee_10", "TAY")) + employees.add(Employee("Employee_11", "LKG")) + employees.add(Employee("Employee_12", "ZKO")) + employees.add(Employee("Employee_13", "OGO")) + employees.add(Employee("Employee_14", "ZKO")) + 
employees.add(Employee("Employee_15", "LKG")) + employees.add(Employee("Employee_16", "ZKO")) + + val s = employees.stream() + + val collector = + Collectors.groupingByConcurrent((e: Employee) => e.department) + + requireAll3Characteristics(collector.characteristics()) + + val grouped = s.collect(collector) + + assertEquals("grouped size", 4, grouped.size()) + + val ogoEmployees = grouped.get("OGO") + assertEquals("grouped ogo size", 3, ogoEmployees.size()) + + val tayEmployees = grouped.get("TAY") + assertEquals("grouped tay size", 2, tayEmployees.size()) + + val lkgEmployees = grouped.get("LKG") + assertEquals("grouped lkg size", 5, lkgEmployees.size()) + + val zkoEmployees = grouped.get("ZKO") + assertEquals("grouped zko size", 6, zkoEmployees.size()) + + employees.forEach(e => + e.department match { + case "OGO" => + assertTrue( + s"missing OGO employee: ${e.name}", + grouped.get("OGO").contains(e) + ) + + case "TAY" => + assertTrue( + s"missing TAY employee: ${e.name}", + grouped.get("TAY").contains(e) + ) + + case "LKG" => + assertTrue( + s"missing LKG employee: ${e.name}", + grouped.get("LKG").contains(e) + ) + + case "ZKO" => + assertTrue( + s"missing ZKO employee: ${e.name}", + grouped.get("ZKO").contains(e) + ) + } + ) + } + + @Test def collectorsGroupingByConcurrent_2Arg(): Unit = { + case class Employee(name: String, department: String, salary: Int) + + val nElements = 16 + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Employee_1", "OGO", 1606)) + employees.add(Employee("Employee_2", "TAY", 1505)) + employees.add(Employee("Employee_3", "LKG", 1404)) + employees.add(Employee("Employee_4", "ZKO", 1303)) + employees.add(Employee("Employee_5", "OGO", 1202)) + employees.add(Employee("Employee_6", "LKG", 1101)) + employees.add(Employee("Employee_7", "LKG", 1000)) + employees.add(Employee("Employee_8", "ZKO", 909)) + employees.add(Employee("Employee_9", "ZKO", 808)) + employees.add(Employee("Employee_10", "TAY", 707)) + 
employees.add(Employee("Employee_11", "LKG", 606)) + employees.add(Employee("Employee_12", "ZKO", 505)) + employees.add(Employee("Employee_13", "OGO", 404)) + employees.add(Employee("Employee_14", "ZKO", 303)) + employees.add(Employee("Employee_15", "LKG", 202)) + employees.add(Employee("Employee_16", "ZKO", 101)) + + val s = employees.stream() + + val collector = + Collectors.groupingByConcurrent( + (e: Employee) => e.department, + Collectors.summingInt((e: Employee) => e.salary) + ) + + requireConcurrentUnorderedCharacteristicsOnly(collector.characteristics()) + + val grouped = s.collect(collector) + + assertEquals("grouped size", 4, grouped.size()) + + val ogoEmployees = grouped.get("OGO") + assertEquals("ogo salary", 3212, ogoEmployees) + + val tayEmployees = grouped.get("TAY") + assertEquals("tay salary", 2212, tayEmployees) + + val lkgEmployees = grouped.get("LKG") + assertEquals("lkg salary", 4313, lkgEmployees) + + val zkoEmployees = grouped.get("ZKO") + assertEquals("zko salary", 3929, zkoEmployees) + } + + @Test def collectorsGroupingByConcurrent_3Arg(): Unit = { + case class Employee(name: String, department: String, salary: Int) + + val nElements = 16 + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Employee_1", "OGO", 1606)) + employees.add(Employee("Employee_2", "TAY", 1505)) + employees.add(Employee("Employee_3", "LKG", 1404)) + employees.add(Employee("Employee_4", "ZKO", 1303)) + employees.add(Employee("Employee_5", "OGO", 1202)) + employees.add(Employee("Employee_6", "LKG", 1101)) + employees.add(Employee("Employee_7", "LKG", 1000)) + employees.add(Employee("Employee_8", "ZKO", 909)) + employees.add(Employee("Employee_9", "ZKO", 808)) + employees.add(Employee("Employee_10", "TAY", 707)) + employees.add(Employee("Employee_11", "LKG", 606)) + employees.add(Employee("Employee_12", "ZKO", 505)) + employees.add(Employee("Employee_13", "OGO", 404)) + employees.add(Employee("Employee_14", "ZKO", 303)) + 
employees.add(Employee("Employee_15", "LKG", 202)) + employees.add(Employee("Employee_16", "ZKO", 101)) + + val s = employees.stream() + + // Note Well: + // Collectors.summingInt() returns an Integer, not a primitive Int. + + val collector = + Collectors.groupingByConcurrent( + (e: Employee) => e.department, + () => new ConcurrentHashMap[String, Integer], + Collectors.summingInt((e: Employee) => e.salary) + ) + + requireConcurrentUnorderedCharacteristicsOnly(collector.characteristics()) + + val grouped = s.collect(collector) + + assertEquals("grouped size", 4, grouped.size()) + + val ogoEmployees = grouped.get("OGO") + assertEquals("ogo salary", 3212, ogoEmployees) + + val tayEmployees = grouped.get("TAY") + assertEquals("tay salary", 2212, tayEmployees) + + val lkgEmployees = grouped.get("LKG") + assertEquals("lkg salary", 4313, lkgEmployees) + + val zkoEmployees = grouped.get("ZKO") + assertEquals("zko salary", 3929, zkoEmployees) + } + + // Empty stream case handled in collectorsJoining_3Arg_EmptyStream Test + + @Test def collectorsJoining(): Unit = { + val expected = "Thequickbrownfox" + + val s = Stream.of("The", "quick", "brown", "fox") + + val collector = Collectors.joining() + + requireEmptyCharacteristics(collector.characteristics()) + + val joined = s.collect(collector) + + assertEquals("unexpected joined", expected, joined) + } + + // Empty stream case handled in collectorsJoining_3Arg_EmptyStream Test + + @Test def collectorsJoining_1Arg(): Unit = { + val expected = "The/quick/brown/fox" + + val s = Stream.of("The", "quick", "brown", "fox") + + val collector = Collectors.joining("/") + + requireEmptyCharacteristics(collector.characteristics()) + + val joined = s.collect(collector) + + assertEquals("unexpected joined", expected, joined) + } + + @Test def collectorsJoining_3Arg_EmptyStream(): Unit = { + val prefix = "prefix~" + val suffix = "~suffix" + + val expected = s"${prefix}${suffix}" + + val s = Stream.empty[String] + + val collector = 
Collectors.joining(" ", prefix, suffix) + + requireEmptyCharacteristics(collector.characteristics()) + + val joined = s.collect(collector) + + assertEquals("unexpected joined", expected, joined) + } + + @Test def collectorsJoining_3Arg(): Unit = { + val prefix = "Dies irae, dies illa, " + val body = "Solvetsaeclum in favilla:" + val suffix = " Teste David cum Sibylla." + + val expected = s"${prefix}${body}${suffix}" + + val s = Stream.of("Solvetsaeclum", "in", "favilla:") + + val collector = Collectors.joining(" ", prefix, suffix) + + requireEmptyCharacteristics(collector.characteristics()) + + val joined = s.collect(collector) + + assertEquals("unexpected joined", expected, joined) + } + + @Test def collectorsMapping(): Unit = { + /* This implements one of the examples in the Java 19 description of the + * java.util.Collectors class: + * // Accumulate names into a List + */ + val nElements = 7 + val sisters = new ArrayList[String](nElements) + sisters.add("Maya") + sisters.add("Electra") + sisters.add("Taygete") + sisters.add("Alcyone") + sisters.add("Celaeno") + sisters.add("Sterope") + sisters.add("Merope") + + val expectedSum = 45 + + val s = sisters.stream() + + // A demo transformation just for the fun of it. + val collector = Collectors.mapping( + (e: String) => e.length(), + Collectors.summingInt((e: Int) => e) + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val sum = s.collect(collector) + + assertEquals("sum", expectedSum, sum) + } + + @Test def collectorsMapping_PreservesCharacteristics(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val collector1 = + Collectors.toConcurrentMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber + ) + + requireAll3Characteristics(collector1.characteristics()) + + // Pick a downstream that is now known to have characteristics. 
+ val collector2 = + Collectors.mapping( + (e: Map.Entry[String, Int]) => Employee(e.getKey(), e.getValue()), + collector1 + ) + + // Are the downstreamCharacteristics inherited correctly? JVM does that. + requireAll3Characteristics(collector2.characteristics()) + } + + @Test def collectorsMaxBy(): Unit = { + val itemComparator = new ju.Comparator[UpcItem] { + def compare(item1: UpcItem, item2: UpcItem): Int = + item1.upc - item2.upc + } + + val nElements = 7 + val items = new ArrayList[UpcItem](nElements) + items.add(UpcItem("Maya", 1)) + items.add(UpcItem("Electra", 2)) + items.add(UpcItem("Taygete", 3)) + items.add(UpcItem("Alcyone", 4)) + items.add(UpcItem("Celaeno", 5)) + items.add(UpcItem("Sterope", 6)) + items.add(UpcItem("Merope", 7)) + + val s = items.stream() + + val collector = Collectors.maxBy(itemComparator) + + requireEmptyCharacteristics(collector.characteristics()) + + val maxOpt: Optional[UpcItem] = s.collect(collector) + + assertTrue("max not found", maxOpt.isPresent) + + assertEquals( + "wrong max item found", + items.get(nElements - 1).name, + maxOpt.get().name + ) + } + + @Test def collectorsMinBy(): Unit = { + val itemComparator = new ju.Comparator[UpcItem] { + def compare(item1: UpcItem, item2: UpcItem): Int = + item1.name.compareTo(item2.name) + } + + val nElements = 7 + val items = new ArrayList[UpcItem](nElements) + items.add(UpcItem("Maya", 1)) + items.add(UpcItem("Electra", 2)) + items.add(UpcItem("Taygete", 3)) + items.add(UpcItem("Alcyone", 4)) + items.add(UpcItem("Celaeno", 5)) + items.add(UpcItem("Sterope", 6)) + items.add(UpcItem("Merope", 7)) + + val expectedMinName = items.get(3).name + + val s = items.stream() + + val collector = Collectors.minBy(itemComparator) + + requireEmptyCharacteristics(collector.characteristics()) + + val minOpt: Optional[UpcItem] = s.collect(collector) + + assertTrue("min not found", minOpt.isPresent) + + assertEquals( + "wrong min item found", + expectedMinName, + minOpt.get().name + ) + } + + @Test def 
collectorsPartitioningBy_1Arg(): Unit = { + /* This implements one of the examples in the Java 19 description of the + * java.util.Collectors class: + * // Partition students into passing and failing + */ + + val expectedPassingCount = 6 + val expectedFailingCount = 2 + + val passThreshold = 80 + + val students = createStdStudentList() + val s = students.stream() + + val collector = + Collectors.partitioningBy((s: Student) => s.grade >= passThreshold) + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val partitions = s.collect(collector) + + assertEquals("partitions size", 2, partitions.size()) + + val passingStudents = partitions.get(true) + assertEquals( + "partition passing size", + expectedPassingCount, + passingStudents.size() + ) + + val failingStudents = partitions.get(false) + assertEquals( + "partition failing size", + expectedFailingCount, + failingStudents.size() + ) + + students.forEach(s => { + if (s.grade >= passThreshold) + assertTrue( + s"missing passing student: ${s.name}", + passingStudents.contains(s) + ) + else { + assertTrue( + s"missing failing student: ${s.name}", + failingStudents.contains(s) + ) + + } + }) + } + + @Test def collectorsPartitioningBy_2Arg(): Unit = { + /* This merges two of the examples in the Java 19 description of the + * java.util.Collectors class: + * // Partition students into passing and failing + * // Compute sum of salaries by department + * The "Compute sum" example uses Int for salary. This Test uses Double. 
+ */ + + val expectedPassingSalary = 364.72 + val expectedFailingSalary = 65.33 + + val passThreshold = 80 + + val students = createStdStudentList() + val s = students.stream() + + val collector = + Collectors.partitioningBy( + (s: Student) => s.grade >= passThreshold, + Collectors.summingDouble((s: Student) => s.salary) + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val partitions = s.collect(collector) + + assertEquals("partitions size", 2, partitions.size()) + + assertEquals( + "partition passing", + expectedPassingSalary, + partitions.get(true), + epsilon + ) + + assertEquals( + "partition failing", + expectedFailingSalary, + partitions.get(false), + epsilon + ) + + } + + @Test def collectorsReducing_1Arg(): Unit = { + val expectedSum = 210 + val nElements = 20 + val s = Stream + .iterate[Int](1, e => e + 1) + .limit(nElements) + + val collector = Collectors.reducing((e1: Int, e2: Int) => e1 + e2) + + requireEmptyCharacteristics(collector.characteristics()) + + val reducedOpt = s.collect(collector) + + assertTrue("unexpected empty optional", reducedOpt.isPresent()) + assertEquals("reduced sum", expectedSum, reducedOpt.get()) + } + + @Test def collectorsReducing_2Arg(): Unit = { + + val identity = 0 + + val s = Stream.empty[Int]() + + val collector = + Collectors.reducing( + identity, + (e1: Int, e2: Int) => -1 + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val reduced = s.collect(collector) + + assertEquals("reduced sum", identity, reduced) + } + + @Test def collectorsReducing_3Arg(): Unit = { + val identity = 0 + val expectedSum = 420 + + val nElements = 20 + val s = Stream + .iterate[Int](1, e => e + 1) + .limit(nElements) + + val collector = + Collectors.reducing( + identity, + (e: Int) => e * 2, + (e1: Int, e2: Int) => e1 + e2 + ) + + requireEmptyCharacteristics(collector.characteristics()) + + val reduced = s.collect(collector) + + assertNotEquals("unexpected identity value", identity, reduced) + + 
assertEquals("reduced sum", expectedSum, reduced) + } + + @Test def collectorsSummarizingDouble(): Unit = { + + val nElements = 7 + val expectedSum = 23.1 + val expectedMin = 0.0 + val expectedAverage = expectedSum / nElements + val expectedMax = 6.6 + + val items = new ArrayList[ValueItem](nElements) + items.add(ValueItem(3.3, 3L, 3)) + items.add(ValueItem(2.2, 2L, 2)) + items.add(ValueItem(1.1, 1L, 1)) + items.add(ValueItem(4.4, 4L, 4)) + items.add(ValueItem(0.0, 0L, 0)) + items.add(ValueItem(6.6, 6L, 6)) + items.add(ValueItem(5.5, 5L, 5)) + + val s = items.stream() + + val collector = + Collectors.summarizingDouble((e: ValueItem) => e.doubleValue) + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val summary = s.collect(collector) + + // Proper stats + assertEquals("count", nElements, summary.getCount()) + assertEquals("sum", expectedSum, summary.getSum(), epsilon) + assertEquals("min", expectedMin, summary.getMin(), epsilon) + assertEquals("average", expectedAverage, summary.getAverage(), epsilon) + assertEquals("max", expectedMax, summary.getMax(), epsilon) + } + + @Test def collectorsSummarizingInt(): Unit = { + + val nElements = 7 + val expectedSum = 322 + val expectedMin = 13 + val expectedAverage = expectedSum / (nElements * 1.0) + val expectedMax = 82 + + val items = new ArrayList[ValueItem](nElements) + items.add(ValueItem(3.3, 3L, 29)) + items.add(ValueItem(2.2, 2L, 66)) + items.add(ValueItem(1.1, 1L, 54)) + items.add(ValueItem(4.4, 4L, 15)) + items.add(ValueItem(0.0, 0L, 63)) + items.add(ValueItem(6.6, 6L, 82)) + items.add(ValueItem(5.5, 5L, 13)) + + val s = items.stream() + + val collector = + Collectors.summarizingInt((e: ValueItem) => e.intValue) + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val summary = s.collect(collector) + + // Proper stats + assertEquals("count", nElements, summary.getCount()) + assertEquals("sum", expectedSum, summary.getSum()) + assertEquals("min", expectedMin, summary.getMin()) 
+ assertEquals("average", expectedAverage, summary.getAverage(), epsilon) + assertEquals("max", expectedMax, summary.getMax()) + } + + @Test def collectorsSummarizingLong(): Unit = { + + val nElements = 7 + val expectedSum = 353L + val expectedMin = 22L + val expectedAverage = expectedSum / (nElements * 1.0) + val expectedMax = 100L + + val items = new ArrayList[ValueItem](nElements) + items.add(ValueItem(3.3, 36L, 29)) + items.add(ValueItem(2.2, 32L, 66)) + items.add(ValueItem(1.1, 100L, 54)) + items.add(ValueItem(4.4, 84L, 15)) + items.add(ValueItem(0.0, 22L, 63)) + items.add(ValueItem(6.6, 45L, 82)) + items.add(ValueItem(5.5, 34L, 13)) + + val s = items.stream() + + val collector = + Collectors.summarizingLong((e: ValueItem) => e.longValue) + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val summary = s.collect(collector) + + // Proper stats + assertEquals("count", nElements, summary.getCount()) + assertEquals("sum", expectedSum, summary.getSum()) + assertEquals("min", expectedMin, summary.getMin()) + assertEquals("average", expectedAverage, summary.getAverage(), epsilon) + assertEquals("max", expectedMax, summary.getMax()) + } + + @Test def collectorsSummingDouble(): Unit = { + + val nElements = 7 + val expectedSum = 23.1 + + val items = new ArrayList[ValueItem](nElements) + items.add(ValueItem(3.3, 3L, 3)) + items.add(ValueItem(2.2, 2L, 2)) + items.add(ValueItem(1.1, 1L, 1)) + items.add(ValueItem(4.4, 4L, 4)) + items.add(ValueItem(0.0, 0L, 0)) + items.add(ValueItem(6.6, 6L, 6)) + items.add(ValueItem(5.5, 5L, 5)) + + val s = items.stream() + + val collector = + Collectors.summingDouble((e: ValueItem) => e.doubleValue) + + requireEmptyCharacteristics(collector.characteristics()) + + val sum = s.collect(collector) + + assertEquals("sum", expectedSum, sum, epsilon) + } + + @Test def collectorsSummingInt(): Unit = { + + val nElements = 7 + val expectedSum = 322 + + val items = new ArrayList[ValueItem](nElements) + items.add(ValueItem(3.3, 3L, 
29)) + items.add(ValueItem(2.2, 2L, 66)) + items.add(ValueItem(1.1, 1L, 54)) + items.add(ValueItem(4.4, 4L, 15)) + items.add(ValueItem(0.0, 0L, 63)) + items.add(ValueItem(6.6, 6L, 82)) + items.add(ValueItem(5.5, 5L, 13)) + + val s = items.stream() + + val collector = + Collectors.summingInt((e: ValueItem) => e.intValue) + + requireEmptyCharacteristics(collector.characteristics()) + + val sum = s.collect(collector) + + assertEquals("sum", expectedSum, sum) + } + + @Test def collectorsSummingLong(): Unit = { + + val nElements = 7 + val expectedSum = 353L + + val items = new ArrayList[ValueItem](nElements) + items.add(ValueItem(3.3, 36L, 29)) + items.add(ValueItem(2.2, 32L, 66)) + items.add(ValueItem(1.1, 100L, 54)) + items.add(ValueItem(4.4, 84L, 15)) + items.add(ValueItem(0.0, 22L, 63)) + items.add(ValueItem(6.6, 45L, 82)) + items.add(ValueItem(5.5, 34L, 13)) + + val s = items.stream() + + val collector = + Collectors.summingLong((e: ValueItem) => e.longValue) + + requireEmptyCharacteristics(collector.characteristics()) + + val sum = s.collect(collector) + + assertEquals("sum", expectedSum, sum) + } + + @Test def collectorsToMap_2Arg(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val nElements = 7 + + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Maya", 0)) + employees.add(Employee("Electra", 1)) + employees.add(Employee("Taygete", 2)) + employees.add(Employee("Alcyone", 3)) + employees.add(Employee("Celaeno", 4)) + employees.add(Employee("Sterope", 5)) + employees.add(Employee("Merope", 6)) + + val s = employees.stream() + + val collector = + Collectors.toMap((e: Employee) => e.name, (e: Employee) => e.badgeNumber) + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val map = s.collect(collector) + + assertEquals("count", nElements, map.size()) + + map.forEach((k: String, v: Int) => + assertEquals( + s"contents: key: '${k}' value: ${v}", + employees.get(v).badgeNumber, + v + ) + ) + } + + 
@Test def collectorsToMap_3Arg(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val nElements = 7 + + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Maya", 0)) + employees.add(Employee("Electra", 1)) + employees.add(Employee("Taygete", 2)) + employees.add(Employee("Alcyone", 3)) + employees.add(Employee("Merope", -6)) + employees.add(Employee("Sterope", 5)) + employees.add(Employee("Merope", 6)) + + val expectedCount = nElements - 1 // One entry, "Merope", will be merged. + + val expectedReplacement = -36 + + val s = employees.stream() + + val collector = + Collectors.toMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber, + (found1: Int, found2: Int) => found1 * found2 + ) + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val map = s.collect(collector) + + assertEquals("count", expectedCount, map.size()) + + map.forEach((k: String, v: Int) => + k match { + case k if (k == "Merope") => + assertEquals( + s"contents: key: '${k}' value: ${v}", + expectedReplacement, + v + ) + + case _ => + assertEquals( + s"contents: key: '${k}' value: ${v}", + employees.get(v).badgeNumber, + v + ) + } + ) + } + + @Test def collectorsToMap_4Arg(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val nElements = 7 + + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Maya", 0)) + employees.add(Employee("Electra", 1)) + employees.add(Employee("Taygete", 2)) + employees.add(Employee("Alcyone", 3)) + employees.add(Employee("Merope", -6)) + employees.add(Employee("Sterope", 5)) + employees.add(Employee("Merope", 6)) + + val expectedCount = nElements - 1 // One entry, "Merope", will be merged. 
+ + val expectedReplacement = -36 + + val s = employees.stream() + + val collector = + Collectors.toMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber, + (found1: Int, found2: Int) => found1 * found2, + () => new HashMap[String, Int] + ) + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val map = s.collect(collector) + + assertEquals("count", expectedCount, map.size()) + + map.forEach((k: String, v: Int) => + k match { + case k if (k == "Merope") => + assertEquals( + s"contents: key: '${k}' value: ${v}", + expectedReplacement, + v + ) + + case _ => + assertEquals( + s"contents: key: '${k}' value: ${v}", + employees.get(v).badgeNumber, + v + ) + } + ) + } + + /* toCollection() use case URL: + * https://stackoverflow.com/questions/21697349/ + * using-streams-to-collect-into-treeset-with-custom-comparator + */ + + @Test def collectorsToCollection(): Unit = { + /* This implements one of the examples in the Java 19 description of the + * java.util.Collectors class: + * // Accumulate names into a TreeSet + */ + + case class TimeStamp(name: String, stamp: Long, index: Int) + + val nTimeStamps = 7 + val timestamps = new ArrayList[TimeStamp](nTimeStamps) + // Ensure that the timestamps are not inserted in sorted or reverse order. 
+ timestamps.add(TimeStamp("Prime", 3, 0)) + timestamps.add(TimeStamp("Matins", 1, 1)) + timestamps.add(TimeStamp("Compline", 7, 2)) + timestamps.add(TimeStamp("Terce", 4, 3)) + timestamps.add(TimeStamp("Lauds", 2, 4)) + timestamps.add(TimeStamp("Nones", 6, 5)) + timestamps.add(TimeStamp("Sext", 5, 6)) + + val expectedSet = new TreeSet[TimeStamp]() + + val s = timestamps.stream() + + val collector = + Collectors.toCollection(() => + new TreeSet[TimeStamp]( + Comparator.comparingLong((e) => e.asInstanceOf[TimeStamp].stamp) + ) + ) + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val treeSet: TreeSet[TimeStamp] = s.collect(collector) + + assertEquals( + "TreeSet has wrong number of elements", + nTimeStamps, + treeSet.size() + ) + + treeSet + .spliterator() + .forEachRemaining((e) => + assertEquals( + "unexpected element", + timestamps.get(e.index).name, + e.name + ) + ) + } + + @Test def collectorsToConcurrentMap_2Arg(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val nElements = 7 + + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Maya", 0)) + employees.add(Employee("Electra", 1)) + employees.add(Employee("Taygete", 2)) + employees.add(Employee("Alcyone", 3)) + employees.add(Employee("Celaeno", 4)) + employees.add(Employee("Sterope", 5)) + employees.add(Employee("Merope", 6)) + + val s = employees.stream() + + val collector = Collectors.toConcurrentMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber + ) + + requireAll3Characteristics(collector.characteristics()) + + val map = s.collect(collector) + + assertEquals("count", nElements, map.size()) + + map.forEach((k: String, v: Int) => + assertEquals( + s"contents: key: '${k}' value: ${v}", + employees.get(v).badgeNumber, + v + ) + ) + } + + @Test def collectorsToConcurrentMap_3Arg(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val nElements = 7 + + val employees = new ArrayList[Employee](nElements) + 
employees.add(Employee("Maya", 0)) + employees.add(Employee("Electra", 1)) + employees.add(Employee("Taygete", 2)) + employees.add(Employee("Alcyone", 3)) + employees.add(Employee("Merope", -6)) + employees.add(Employee("Sterope", 5)) + employees.add(Employee("Merope", 6)) + + val expectedCount = nElements - 1 // One entry, "Merope", will be merged. + + val expectedReplacement = -36 + + val s = employees.stream() + + val collector = Collectors.toConcurrentMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber, + (found1: Int, found2: Int) => found1 * found2 + ) + + requireAll3Characteristics(collector.characteristics()) + + val map = s.collect(collector) + + assertEquals("count", expectedCount, map.size()) + + map.forEach((k: String, v: Int) => + k match { + case k if (k == "Merope") => + assertEquals( + s"contents: key: '${k}' value: ${v}", + expectedReplacement, + v + ) + + case _ => + assertEquals( + s"contents: key: '${k}' value: ${v}", + employees.get(v).badgeNumber, + v + ) + } + ) + } + + @Test def collectorsToConcurrentMap_4Arg(): Unit = { + case class Employee(name: String, badgeNumber: Int) + + val nElements = 7 + + val employees = new ArrayList[Employee](nElements) + employees.add(Employee("Maya", 0)) + employees.add(Employee("Electra", 1)) + employees.add(Employee("Taygete", 2)) + employees.add(Employee("Alcyone", 3)) + employees.add(Employee("Merope", -6)) + employees.add(Employee("Sterope", 5)) + employees.add(Employee("Merope", 6)) + + val expectedCount = nElements - 1 // One entry, "Merope", will be merged. 
+ + val expectedReplacement = -36 + + val s = employees.stream() + + val collector = + Collectors.toConcurrentMap( + (e: Employee) => e.name, + (e: Employee) => e.badgeNumber, + (found1: Int, found2: Int) => found1 * found2, + () => new ConcurrentHashMap[String, Int] + ) + + requireAll3Characteristics(collector.characteristics()) + + val map = s.collect(collector) + + assertEquals("count", expectedCount, map.size()) + + map.forEach((k: String, v: Int) => + k match { + case k if (k == "Merope") => + assertEquals( + s"contents: key: '${k}' value: ${v}", + expectedReplacement, + v + ) + + case _ => + assertEquals( + s"contents: key: '${k}' value: ${v}", + employees.get(v).badgeNumber, + v + ) + } + ) + } + + @Test def collectorsToList(): Unit = { + /* This implements one of the examples in the Java 19 description of the + * java.util.Collectors class: + * // Accumulate names into a List + */ + val nElements = 7 + val sisters = new ArrayList[String](nElements) + sisters.add("Maya") + sisters.add("Electra") + sisters.add("Taygete") + sisters.add("Alcyone") + sisters.add("Celaeno") + sisters.add("Sterope") + sisters.add("Merope") + + val s = sisters.stream() + + val collector = Collectors.toList[String]() + + requireIdentityCharacteristicOnly(collector.characteristics()) + + val collected = s.collect(collector) + + assertEquals("list size", nElements, collected.size()) + + // Proper elements, in encounter order + for (j <- 0 until nElements) + assertEquals("list element", sisters.get(j), collected.get(j)) + } + + @Test def collectorsToSet(): Unit = { + val nElements = 7 + val sisters = new ArrayList[String](nElements) + sisters.add("Maya") + sisters.add("Electra") + sisters.add("Taygete") + sisters.add("Alcyone") + sisters.add("Celaeno") + sisters.add("Sterope") + sisters.add("Merope") + + val s = sisters.stream() + + val collector = Collectors.toSet[String]() + + requireIdentityUnorderedCharacteristicOnly(collector.characteristics()) + + val collected = 
s.collect(collector) + + assertEquals("set size", nElements, collected.size()) + + // Proper elements + for (j <- 0 until nElements) { + val expected = sisters.get(j) + assertTrue( + s"set element not in Set: ${expected}", + collected.contains(expected) + ) + } + } + +} diff --git a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/lang/CharacterTest.scala b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/lang/CharacterTest.scala index a4ade77589..5dcf55f9b1 100644 --- a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/lang/CharacterTest.scala +++ b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/lang/CharacterTest.scala @@ -487,6 +487,13 @@ class CharacterTest { } + @Test def isLowerCase(): Unit = { + assertTrue(Character.isLowerCase('a')) + assertTrue(Character.isLowerCase('z')) + assertFalse(Character.isLowerCase('A')) + assertFalse(Character.isLowerCase(-1)) + } + @Test def toLowerCaseLow(): Unit = { // low chars assertTrue(toLowerCase('\n') equals '\n') @@ -606,4 +613,20 @@ class CharacterTest { // unspecified for non-supplementary code points } + @Test def isWhitespace(): Unit = { + assertTrue(Character.isWhitespace(' ')) + assertTrue(Character.isWhitespace('\t')) + assertTrue(Character.isWhitespace('\n')) + assertTrue(Character.isWhitespace('\f')) + assertTrue(Character.isWhitespace('\r')) + assertTrue(Character.isWhitespace('\u001C')) // file separator + assertTrue(Character.isWhitespace('\u001D')) // group separator + assertTrue(Character.isWhitespace('\u001E')) // record separator + assertTrue(Character.isWhitespace('\u001F')) // unit separator + + assertFalse(Character.isWhitespace('\b')) + assertFalse(Character.isWhitespace('a')) + // https://github.com/scala-native/scala-native/issues/3154 + assertFalse(Character.isWhitespace(-1)) + } } diff --git a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/nio/file/FilesTest.scala 
b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/nio/file/FilesTest.scala index 23f3f58e6a..2a112223bd 100644 --- a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/nio/file/FilesTest.scala +++ b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/nio/file/FilesTest.scala @@ -754,51 +754,119 @@ class FilesTest { } } - @Test def filesWalkWalksDirectory(): Unit = { + @Test def filesWalk_File(): Unit = { + withTemporaryDirectory { dirFile => + val f0 = dirFile.toPath.resolve("f0") + + Files.createFile(f0) + assertTrue("a1", Files.exists(f0) && Files.isRegularFile(f0)) + + val it = Files.walk(f0).iterator() // walk file, not directory + + val files = scala.collection.mutable.Set.empty[Path] + while (it.hasNext) { + files += it.next() + } + + assertEquals("Unexpected number of files", 1, files.size) + assertTrue("stream should contain starting file", files contains f0) + } + } + + @Test def filesWalk_EmptyDir(): Unit = { + withTemporaryDirectory { dirFile => + val dir = dirFile.toPath() + val it = Files.walk(dir).iterator() + val files = scala.collection.mutable.Set.empty[Path] + while (it.hasNext) { + files += it.next() + } + + assertEquals("Unexpected number of files", 1, files.size) + assertTrue("stream should contain starting dir", files contains dir) + } + } + + @Test def filesWalk_Directory_OneDeep(): Unit = { withTemporaryDirectory { dirFile => val dir = dirFile.toPath() - val f0 = dir.resolve("f0") val f1 = dir.resolve("f1") - val d0 = dir.resolve("d0") - val f2 = d0.resolve("f2") + val f2 = dir.resolve("f2") + val d1 = dir.resolve("d1") + val d1f1 = d1.resolve("d1f1") - Files.createDirectory(d0) - Files.createFile(f0) Files.createFile(f1) Files.createFile(f2) - assertTrue("a1", Files.exists(d0) && Files.isDirectory(d0)) - assertTrue("a2", Files.exists(f0) && Files.isRegularFile(f0)) - assertTrue("a3", Files.exists(f1) && Files.isRegularFile(f1)) - assertTrue("a4", Files.exists(f2) && Files.isRegularFile(f2)) 
+ assertTrue("a1", Files.exists(f1) && Files.isRegularFile(f1)) + assertTrue("a2", Files.exists(f2) && Files.isRegularFile(f2)) + + Files.createDirectory(d1) + Files.createFile(d1f1) + assertTrue("a3", Files.exists(d1) && Files.isDirectory(d1)) + assertTrue("a4", Files.exists(d1f1) && Files.isRegularFile(d1f1)) val it = Files.walk(dir).iterator() val files = scala.collection.mutable.Set.empty[Path] while (it.hasNext()) { files += it.next() } - assertTrue("a5", files.size == 5) - assertTrue("a6", files contains dir) - assertTrue("a7", files contains d0) - assertTrue("a8", files contains f2) - assertTrue("a9", files contains f0) - assertTrue("a10", files contains f1) + + assertEquals("Unexpected number of files", 5, files.size) + + assertTrue("stream should contain starting dir", files contains dir) + assertTrue("a5", files contains f1) + assertTrue("a6", files contains f2) + assertTrue("a7", files contains d1) + assertTrue("a8", files contains d1f1) } } - @Test def filesWalkWalksSingleFile(): Unit = { + @Test def filesWalk_Directory_TwoDeep(): Unit = { withTemporaryDirectory { dirFile => - val f0 = dirFile.toPath.resolve("f0") + val dir = dirFile.toPath() - Files.createFile(f0) - assertTrue("a1", Files.exists(f0) && Files.isRegularFile(f0)) + val f1 = dir.resolve("f1") + val f2 = dir.resolve("f2") + + val d1 = dir.resolve("d1") + val d1f1 = d1.resolve("d1f1") + + val d2 = d1.resolve("d2") + val d2f1 = d2.resolve("d2f1") - val it = Files.walk(f0).iterator() + Files.createFile(f1) + Files.createFile(f2) + assertTrue("a1", Files.exists(f1) && Files.isRegularFile(f1)) + assertTrue("a2", Files.exists(f2) && Files.isRegularFile(f2)) + + Files.createDirectory(d1) + Files.createFile(d1f1) + assertTrue("a3", Files.exists(d1) && Files.isDirectory(d1)) + assertTrue("a4", Files.exists(d1f1) && Files.isRegularFile(d1f1)) + + Files.createDirectory(d2) + Files.createFile(d2f1) + assertTrue("a5", Files.exists(d2) && Files.isDirectory(d2)) + assertTrue("a6", Files.exists(d2f1) && 
Files.isRegularFile(d2f1)) + + val it = Files.walk(dir).iterator() val files = scala.collection.mutable.Set.empty[Path] - while (it.hasNext) { + while (it.hasNext()) { files += it.next() } - assertTrue("a2", files.size == 1) - assertTrue("a3", files contains f0) + + assertEquals("Unexpected number of files", 7, files.size) + + assertTrue("stream should contain starting dir", files contains dir) + + assertTrue("a7", files contains f1) + assertTrue("a8", files contains f2) + + assertTrue("a9", files contains d1) + assertTrue("a10", files contains d1f1) + + assertTrue("a11", files contains d2) + assertTrue("a12", files contains d2f1) } } diff --git a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/nio/file/WindowsPathTest.scala b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/nio/file/WindowsPathTest.scala new file mode 100644 index 0000000000..dd27b7ff6a --- /dev/null +++ b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/nio/file/WindowsPathTest.scala @@ -0,0 +1,694 @@ +package org.scalanative.testsuite.javalib.nio.file + +import java.nio.file._ + +import org.junit.{Test, BeforeClass} +import org.junit.Assert._ +import org.junit.Assume._ + +import scala.collection.mutable + +import org.scalanative.testsuite.utils.AssertThrows.assertThrows +import org.scalanative.testsuite.utils.Platform.isWindows + +object WindowsPathTest { + @BeforeClass + def assumeIsWindows(): Unit = { + assumeTrue( + "Not checking Windows paths on Unix", + isWindows + ) + } +} + +class WindowsPathTest { + // Tests that are commented out and marked with TODO represent known issues.
+ + @Test def pathsGet(): Unit = { + assertThrows(classOf[InvalidPathException], Paths.get("///")) + } + + @Test def pathGetNameCount(): Unit = { + assertTrue(Paths.get("/").getNameCount == 0) + assertTrue(Paths.get("x:/").getNameCount == 0) + // TODO: In JVM empty path has count 1 + // assertTrue(Paths.get("").getNameCount == 1) + assertTrue(Paths.get("foo").getNameCount == 1) + assertTrue(Paths.get("foo//bar").getNameCount == 2) + assertTrue(Paths.get("foo/bar/baz").getNameCount == 3) + assertTrue(Paths.get("/foo/bar/baz").getNameCount == 3) + assertTrue(Paths.get("x:/foo/bar/baz").getNameCount == 3) + assertTrue(Paths.get("././").getNameCount == 2) +// // TODO JVM 17 throws: InvalidPathException: Trailing char < > at index 4: ././ +// assertTrue(Paths.get("././ ").getNameCount == 3) + } + + @Test def pathGetName(): Unit = { + // TODO: + // assertEquals("", Paths.get("").getName(0).toString) + assertEquals("foo", Paths.get("foo").getName(0).toString) + assertEquals("foo", Paths.get("foo//bar").getName(0).toString) + assertEquals("bar", Paths.get("foo//bar").getName(1).toString) + + assertEquals("foo", Paths.get("foo/bar/baz").getName(0).toString) + assertEquals("bar", Paths.get("foo/bar/baz").getName(1).toString) + assertEquals("baz", Paths.get("foo/bar/baz").getName(2).toString) + + assertEquals("foo", Paths.get("/foo/bar/baz").getName(0).toString) + assertEquals("bar", Paths.get("/foo/bar/baz").getName(1).toString) + assertEquals("baz", Paths.get("/foo/bar/baz").getName(2).toString) + + assertEquals("foo", Paths.get("x:/foo/bar/baz").getName(0).toString) + assertEquals("bar", Paths.get("x:/foo/bar/baz").getName(1).toString) + assertEquals("baz", Paths.get("x:/foo/bar/baz").getName(2).toString) + } + + @Test def pathEndsWithWithAbsolutePath(): Unit = { + assertTrue(Paths.get("/foo/bar/baz").endsWith(Paths.get("baz"))) + assertTrue(Paths.get("x:/foo/bar/baz").endsWith(Paths.get("baz"))) + // TODO: on JVM ending cannot start with / + // 
assertFalse(Paths.get("/foo/bar/baz").endsWith(Paths.get("/baz"))) + // TODO: on JVM ending cannot start with / + // assertFalse(Paths.get("x:/foo/bar/baz").endsWith(Paths.get("/baz"))) + assertTrue(Paths.get("/foo/bar/baz").endsWith(Paths.get("bar/baz"))) + assertTrue(Paths.get("x:/foo/bar/baz").endsWith(Paths.get("bar/baz"))) + // TODO: on JVM ending cannot start with / + // assertFalse(Paths.get("/foo/bar/baz").endsWith(Paths.get("/bar/baz"))) + // TODO: on JVM ending cannot start with / + // assertFalse(Paths.get("x:/foo/bar/baz").endsWith(Paths.get("/bar/baz"))) + assertTrue(Paths.get("/foo/bar/baz").endsWith(Paths.get("foo/bar/baz"))) + assertTrue(Paths.get("x:/foo/bar/baz").endsWith(Paths.get("foo/bar/baz"))) + assertTrue(Paths.get("/foo/bar/baz").endsWith(Paths.get("/foo/bar/baz"))) + assertTrue( + Paths.get("x:/foo/bar/baz").endsWith(Paths.get("x:/foo/bar/baz")) + ) + } + + @Test def pathEndsWithWithRelativePath(): Unit = { + assertTrue(Paths.get("foo/bar/baz").endsWith(Paths.get("baz"))) + // TODO: on JVM ending cannot start with / + // assertFalse(Paths.get("foo/bar/baz").endsWith(Paths.get("/baz"))) + assertTrue(Paths.get("foo/bar/baz").endsWith(Paths.get("bar/baz"))) + // TODO: on JVM ending cannot start with / + // assertFalse(Paths.get("foo/bar/baz").endsWith(Paths.get("/bar/baz"))) + assertTrue(Paths.get("foo/bar/baz").endsWith(Paths.get("foo/bar/baz"))) + // TODO: on JVM ending cannot start with / + // assertFalse(Paths.get("foo/bar/baz").endsWith(Paths.get("/foo/bar/baz"))) + } + + @Test def pathGetFileName(): Unit = { + // TODO: on JVM empty path has a name "" + // assertEquals("", Paths.get("").getFileName.toString) + assertEquals("foo", Paths.get("foo").getFileName.toString) + assertEquals("foo", Paths.get("/foo").getFileName.toString) + assertEquals("foo", Paths.get("x:/foo").getFileName.toString) + assertEquals("bar", Paths.get("foo/bar").getFileName.toString) + assertEquals("bar", Paths.get("/foo/bar").getFileName.toString) + 
assertEquals("bar", Paths.get("x:/foo/bar").getFileName.toString) + // TODO: on JVM "/" has a no name + // assertEquals(null, Paths.get("/").getFileName) + // TODO: on JVM "x:/" has a no name + // assertEquals(null, Paths.get("x:/").getFileName) + assertEquals(null, Paths.get("x:").getFileName) + } + + @Test def pathSubpath(): Unit = { + assertEquals("", Paths.get("").subpath(0, 1).toString) + // TODO + // assertThrows(classOf[IllegalArgumentException], Paths.get("").subpath(0, 2)) + + assertEquals("foo", Paths.get("foo/bar/baz").subpath(0, 1).toString) + assertEquals("foo\\bar", Paths.get("foo/bar/baz").subpath(0, 2).toString) + assertEquals( + "foo\\bar\\baz", + Paths.get("foo/bar/baz").subpath(0, 3).toString + ) + assertEquals("bar\\baz", Paths.get("foo/bar/baz").subpath(1, 3).toString) + assertEquals("baz", Paths.get("foo/bar/baz").subpath(2, 3).toString) + + assertEquals("foo", Paths.get("/foo/bar/baz").subpath(0, 1).toString) + assertEquals("foo", Paths.get("x:/foo/bar/baz").subpath(0, 1).toString) + assertEquals("foo\\bar", Paths.get("/foo/bar/baz").subpath(0, 2).toString) + assertEquals("foo\\bar", Paths.get("x:/foo/bar/baz").subpath(0, 2).toString) + assertEquals( + "foo\\bar\\baz", + Paths.get("/foo/bar/baz").subpath(0, 3).toString + ) + assertEquals( + "foo\\bar\\baz", + Paths.get("x:/foo/bar/baz").subpath(0, 3).toString + ) + assertEquals("bar\\baz", Paths.get("/foo/bar/baz").subpath(1, 3).toString) + assertEquals("bar\\baz", Paths.get("x:/foo/bar/baz").subpath(1, 3).toString) + assertEquals("baz", Paths.get("/foo/bar/baz").subpath(2, 3).toString) + assertEquals("baz", Paths.get("x:/foo/bar/baz").subpath(2, 3).toString) + } + + @Test def pathGetParent(): Unit = { + assertEquals(null, Paths.get("").getParent) + assertEquals(null, Paths.get("x:").getParent) + assertEquals(null, Paths.get("foo").getParent) + assertEquals(null, Paths.get("/").getParent) + assertEquals(null, Paths.get("x:/").getParent) + assertEquals(null, Paths.get("\\").getParent) + 
assertEquals(null, Paths.get("x:\\").getParent) + assertEquals("foo", Paths.get("foo/bar").getParent.toString) + assertEquals("\\foo", Paths.get("/foo/bar").getParent.toString) + assertEquals("x:\\foo", Paths.get("x:/foo/bar").getParent.toString) + assertEquals("\\", Paths.get("/foo").getParent.toString) + assertEquals("x:\\", Paths.get("x:/foo").getParent.toString) + assertEquals("foo", Paths.get("foo/.").getParent.toString) + assertEquals(".", Paths.get("./.").getParent.toString) + } + + @Test def pathGetRoot(): Unit = { + assertEquals(null, Paths.get("").getRoot) + assertEquals(null, Paths.get("foo").getRoot) + assertEquals(null, Paths.get("foo/bar").getRoot) + assertEquals("\\", Paths.get("/foo").getRoot.toString) + assertEquals("x:\\", Paths.get("x:/foo").getRoot.toString) + assertEquals("\\", Paths.get("/foo/bar").getRoot.toString) + assertEquals("x:\\", Paths.get("x:/foo/bar").getRoot.toString) + assertEquals("\\", Paths.get("/foo///bar").getRoot.toString) + assertEquals("x:\\", Paths.get("x:/foo///bar").getRoot.toString) + assertEquals("\\", Paths.get("/").getRoot.toString) + assertEquals("x:\\", Paths.get("x:/").getRoot.toString) + } + + @Test def pathIsAbsolute(): Unit = { + assertFalse(Paths.get("").isAbsolute) + assertFalse(Paths.get("foo").isAbsolute) + assertFalse(Paths.get("foo/bar").isAbsolute) + assertFalse(Paths.get("/foo").isAbsolute) + assertTrue(Paths.get("x:/foo").isAbsolute) + assertFalse(Paths.get("/foo/bar").isAbsolute) + assertTrue(Paths.get("x:/foo/bar").isAbsolute) + assertFalse(Paths.get("/foo///bar").isAbsolute) + assertTrue(Paths.get("x:/foo///bar").isAbsolute) + assertFalse(Paths.get("/").isAbsolute) + assertTrue(Paths.get("x:/").isAbsolute) + } + + @Test def pathIterator(): Unit = { + import scala.language.implicitConversions + implicit def iteratorToSeq[T: scala.reflect.ClassTag]( + it: java.util.Iterator[T] + ): Seq[T] = { + val buf = new mutable.UnrolledBuffer[T]() + while (it.hasNext()) buf += it.next() + buf.toSeq + } + + // 
TODO + // assertEquals(Seq(""), Paths.get("").iterator.map(_.toString)) + assertEquals(Seq("foo"), Paths.get("foo").iterator.map(_.toString)) + assertEquals( + Seq("foo", "bar"), + Paths.get("foo/bar").iterator.map(_.toString) + ) + assertEquals( + Seq("foo", "bar"), + Paths.get("foo//bar").iterator.map(_.toString) + ) + assertEquals(Seq("foo"), Paths.get("/foo").iterator.map(_.toString)) + assertEquals(Seq("foo"), Paths.get("x:/foo").iterator.map(_.toString)) + assertEquals( + Seq("foo", "bar"), + Paths.get("/foo/bar").iterator.map(_.toString) + ) + assertEquals( + Seq("foo", "bar"), + Paths.get("x:/foo/bar").iterator.map(_.toString) + ) + assertEquals( + Seq("foo", "bar"), + Paths.get("/foo//bar").iterator.map(_.toString) + ) + assertEquals( + Seq("foo", "bar"), + Paths.get("x:/foo//bar").iterator.map(_.toString) + ) + } + + @Test def pathNormalize(): Unit = { + assertEquals("", Paths.get("").normalize.toString) + assertEquals("foo", Paths.get("foo").normalize.toString) + assertEquals("foo\\bar", Paths.get("foo/bar").normalize.toString) + assertEquals("foo\\bar", Paths.get("foo//bar").normalize.toString) + assertEquals("bar", Paths.get("foo/../bar").normalize.toString) + assertEquals("..\\bar", Paths.get("foo/../../bar").normalize.toString) + // TODO + // assertEquals("\\bar", Paths.get("/foo/../../bar").normalize.toString) + assertEquals("x:\\bar", Paths.get("x:/foo/../../bar").normalize.toString) + assertEquals("\\", Paths.get("/").normalize.toString) + assertEquals("x:\\", Paths.get("x:/").normalize.toString) + assertEquals("x:", Paths.get("x:").normalize.toString) + assertEquals("\\foo", Paths.get("/foo").normalize.toString) + assertEquals("x:\\foo", Paths.get("x:/foo").normalize.toString) + assertEquals("\\foo\\bar", Paths.get("/foo/bar").normalize.toString) + assertEquals("x:\\foo\\bar", Paths.get("x:/foo/bar").normalize.toString) + assertEquals("\\foo\\bar", Paths.get("/foo//bar").normalize.toString) + assertEquals("x:\\foo\\bar", 
Paths.get("x:/foo//bar").normalize.toString) + assertEquals("\\foo\\bar", Paths.get("/foo/bar/").normalize.toString) + assertEquals("x:\\foo\\bar", Paths.get("x:/foo/bar/").normalize.toString) + assertEquals("foo\\bar", Paths.get("./foo/bar/").normalize.toString) + assertEquals("..\\foo\\bar", Paths.get("../foo/bar/").normalize.toString) + assertEquals("\\foo\\bar", Paths.get("/foo/bar/.").normalize.toString) + assertEquals("x:\\foo\\bar", Paths.get("x:/foo/bar/.").normalize.toString) + assertEquals("foo\\bar", Paths.get("foo/bar/.").normalize.toString) + assertEquals("..\\foo\\bar", Paths.get("../foo/bar/.").normalize.toString) + assertEquals("..\\foo\\bar", Paths.get("../foo//bar/.").normalize.toString) + } + + @Test def pathStartsWith(): Unit = { + // assertTrue(Paths.get("").startsWith(Paths.get(""))) + assertTrue(Paths.get("foo").startsWith(Paths.get("foo"))) + assertTrue(Paths.get("foo/bar").startsWith(Paths.get("foo"))) + assertTrue(Paths.get("foo/bar/baz").startsWith(Paths.get("foo/bar"))) + assertFalse(Paths.get("foo").startsWith(Paths.get("bar"))) + assertFalse(Paths.get("foo/bar").startsWith(Paths.get("bar"))) + // TODO + // assertFalse(Paths.get("/").startsWith(Paths.get(""))) + assertFalse(Paths.get("x:/").startsWith(Paths.get(""))) + // TODO + // assertFalse(Paths.get("").startsWith(Paths.get("/"))) + assertTrue(Paths.get("/foo").startsWith(Paths.get("/"))) + assertTrue(Paths.get("x:/foo").startsWith(Paths.get("x:/"))) + assertTrue(Paths.get("/foo/bar").startsWith(Paths.get("/foo"))) + assertTrue(Paths.get("x:/foo/bar").startsWith(Paths.get("x:/foo"))) + assertTrue(Paths.get("/").startsWith(Paths.get("/"))) + assertFalse(Paths.get("x:/").startsWith(Paths.get("x:"))) + assertTrue(Paths.get("x:/").startsWith(Paths.get("x:\\"))) + assertFalse(Paths.get("/").startsWith("/foo")) + assertFalse(Paths.get("x:/").startsWith("x:/foo")) + } + + @Test def pathRelativize(): Unit = { + assertEquals("#1", "", Paths.get("").relativize(Paths.get("")).toString) + 
assertEquals( + "#2", + "bar", + Paths.get("foo").relativize(Paths.get("foo/bar")).toString + ) + assertEquals( + "#3", + "..", + Paths.get("foo/bar").relativize(Paths.get("foo")).toString + ) + assertEquals( + "#4", + "..\\bar", + Paths.get("foo").relativize(Paths.get("bar")).toString + ) + assertEquals( + "#5", + "..\\baz", + Paths + .get("foo/bar") + .relativize(Paths.get("foo/baz")) + .toString + ) + if (org.scalanative.testsuite.utils.Platform.executingInJVMOnJDK8OrLower) { + // TODO Java 8- +// assertEquals( +// "#6-JVM8", +// "..\\foo", +// Paths.get("").relativize(Paths.get("foo")).toString +// ) + } else { + assertEquals( + "#6", + "foo", + Paths.get("").relativize(Paths.get("foo")).toString + ) + } + if (org.scalanative.testsuite.utils.Platform.executingInJVMOnJDK8OrLower) { + // TODO Java 8- +// assertEquals( +// "#7-JVM8", +// "..\\..\\..\\bar", +// Paths +// .get("foo/../bar") +// .relativize(Paths.get("bar")) +// .toString +// ) + } else { + assertEquals( + "#7", + "", + Paths + .get("foo/../bar") + .relativize(Paths.get("bar")) + .toString + ) + } + assertEquals( + "#8", + "..\\foo", + Paths + .get("bar") + .relativize(Paths.get("bar/../foo")) + .toString + ) + assertThrows( + "#9", + classOf[IllegalArgumentException], + assertEquals("", Paths.get("/").relativize(Paths.get("")).toString) + ) + assertEquals("#10", "", Paths.get("/").relativize(Paths.get("/")).toString) + assertEquals( + "#11", + "", + Paths.get("x:/").relativize(Paths.get("x:/")).toString + ) + assertEquals( + "#12", + "bar", + Paths.get("/foo").relativize(Paths.get("/foo/bar")).toString + ) + assertEquals( + "#13", + "bar", + Paths.get("x:/foo").relativize(Paths.get("x:/foo/bar")).toString + ) + assertEquals( + "#14", + "..", + Paths.get("/foo/bar").relativize(Paths.get("/foo")).toString + ) + assertEquals( + "#15", + "..", + Paths.get("x:/foo/bar").relativize(Paths.get("x:/foo")).toString + ) + assertEquals( + "#17", + "..\\bar", + 
Paths.get("/foo").relativize(Paths.get("/bar")).toString + ) + assertEquals( + "#18", + "..\\bar", + Paths.get("x:/foo").relativize(Paths.get("x:/bar")).toString + ) + assertEquals( + "#19", + "..\\baz", + Paths + .get("/foo/bar") + .relativize(Paths.get("/foo/baz")) + .toString + ) + assertEquals( + "#20", + "..\\baz", + Paths + .get("x:/foo/bar") + .relativize(Paths.get("x:/foo/baz")) + .toString + ) + assertEquals( + "#21", + "foo", + Paths.get("/").relativize(Paths.get("/foo")).toString + ) + if (org.scalanative.testsuite.utils.Platform.executingInJVMOnJDK8OrLower) { + // TODO Java 8- +// assertEquals( +// "#22-JVM8", +// "..\\..\\..\\bar", +// Paths +// .get("/foo/../bar") +// .relativize(Paths.get("/bar")) +// .toString +// ) + } else { + assertEquals( + "#22", + "", + Paths + .get("/foo/../bar") + .relativize(Paths.get("/bar")) + .toString + ) + } + if (org.scalanative.testsuite.utils.Platform.executingInJVMOnJDK8OrLower) { + // TODO Java 8- +// assertEquals( +// "#23-JVM8", +// "..\\..\\..\\bar", +// Paths +// .get("x:/foo/../bar") +// .relativize(Paths.get("x:/bar")) +// .toString +// ) + } else { + assertEquals( + "#24", + "", + Paths + .get("x:/foo/../bar") + .relativize(Paths.get("x:/bar")) + .toString + ) + } + assertEquals( + "#25", + "..\\foo", + Paths + .get("/bar") + .relativize(Paths.get("/bar/../foo")) + .toString + ) + assertEquals( + "#26", + "..\\foo", + Paths + .get("x:/bar") + .relativize(Paths.get("x:/bar/../foo")) + .toString + ) + assertEquals( + "#27", + "b\\c.jar", + Paths + .get("C:\\a") + .relativize(Paths.get("C:\\a\\b\\c.jar")) + .toString + ) + } + + @Test def pathResolve(): Unit = { + assertEquals("", Paths.get("").resolve(Paths.get("")).toString) + assertEquals("\\", Paths.get("/").resolve(Paths.get("")).toString) + assertEquals("x:\\", Paths.get("x:/").resolve(Paths.get("")).toString) + assertEquals( + "foo\\foo\\bar", + Paths.get("foo").resolve(Paths.get("foo/bar")).toString + ) + assertEquals( + "foo\\bar\\foo", + 
Paths.get("foo/bar").resolve(Paths.get("foo")).toString + ) + assertEquals( + "foo\\bar", + Paths.get("foo").resolve(Paths.get("bar")).toString + ) + assertEquals( + "foo\\bar\\foo\\baz", + Paths + .get("foo/bar") + .resolve(Paths.get("foo/baz")) + .toString + ) + assertEquals("foo", Paths.get("").resolve(Paths.get("foo")).toString) + assertEquals( + "foo\\..\\bar\\bar", + Paths + .get("foo/../bar") + .resolve(Paths.get("bar")) + .toString + ) + + assertEquals("\\", Paths.get("/").resolve(Paths.get("/")).toString) + assertEquals("x:\\", Paths.get("x:/").resolve(Paths.get("x:/")).toString) + // TODO + // assertEquals( + // "\\foo\\bar", Paths.get("/foo").resolve(Paths.get("/foo/bar")).toString + // ) + assertEquals( + "x:\\foo\\bar", + Paths.get("x:/foo").resolve(Paths.get("x:/foo/bar")).toString + ) + // TODO + // assertEquals( + // "\\foo", Paths.get("/foo/bar").resolve(Paths.get("/foo")).toString, + // ) + assertEquals( + "x:\\foo", + Paths.get("x:/foo/bar").resolve(Paths.get("x:/foo")).toString + ) + // TODO + // assertEquals("\\bar", Paths.get("/foo").resolve(Paths.get("/bar")).toString, ) + assertEquals( + "x:\\bar", + Paths.get("x:/foo").resolve(Paths.get("x:/bar")).toString + ) + // TODO + // assertEquals( + // "\\foo\\baz", + // Paths + // .get("/foo/bar") + // .resolve(Paths.get("/foo/baz")) + // .toString + // ) + assertEquals( + "x:\\foo\\baz", + Paths + .get("x:/foo/bar") + .resolve(Paths.get("x:/foo/baz")) + .toString + ) + + assertEquals("\\foo", Paths.get("/").resolve(Paths.get("/foo")).toString) + assertEquals( + "x:\\foo", + Paths.get("x:/").resolve(Paths.get("x:/foo")).toString + ) + // TODO + // assertEquals( + // "\\bar", Paths.get("/foo/../bar").resolve(Paths.get("/bar")).toString + // ) + assertEquals( + "x:\\bar", + Paths.get("x:/foo/../bar").resolve(Paths.get("x:/bar")).toString + ) + } + + @Test def pathResolveSibling(): Unit = { + assertEquals("", Paths.get("").resolveSibling(Paths.get("")).toString) + assertEquals("", 
Paths.get("/").resolveSibling(Paths.get("")).toString) + assertEquals("", Paths.get("x:/").resolveSibling(Paths.get("")).toString) + assertEquals( + "foo\\bar", + Paths + .get("foo") + .resolveSibling(Paths.get("foo/bar")) + .toString + ) + assertEquals( + "foo\\foo", + Paths + .get("foo/bar") + .resolveSibling(Paths.get("foo")) + .toString + ) + assertEquals( + "bar", + Paths.get("foo").resolveSibling(Paths.get("bar")).toString + ) + assertEquals( + "foo\\foo\\baz", + Paths + .get("foo/bar") + .resolveSibling(Paths.get("foo/baz")) + .toString + ) + assertEquals("foo", Paths.get("").resolveSibling(Paths.get("foo")).toString) + assertEquals( + "foo\\..\\bar", + Paths + .get("foo/../bar") + .resolveSibling(Paths.get("bar")) + .toString + ) + + assertEquals("\\", Paths.get("/").resolveSibling(Paths.get("/")).toString) + assertEquals( + "x:\\", + Paths.get("x:/").resolveSibling(Paths.get("x:/")).toString + ) + assertEquals( + "\\foo\\bar", + Paths + .get("/foo") + .resolveSibling(Paths.get("/foo/bar")) + .toString + ) + assertEquals( + "x:\\foo\\bar", + Paths + .get("x:/foo") + .resolveSibling(Paths.get("x:/foo/bar")) + .toString + ) + // TODO + // assertEquals( + // "\\foo", + // Paths + // .get("/foo/bar") + // .resolveSibling(Paths.get("/foo")) + // .toString, + // ) + assertEquals( + "x:\\foo", + Paths + .get("x:/foo/bar") + .resolveSibling(Paths.get("x:/foo")) + .toString + ) + assertEquals( + "\\bar", + Paths.get("/foo").resolveSibling(Paths.get("/bar")).toString + ) + assertEquals( + "x:\\bar", + Paths.get("x:/foo").resolveSibling(Paths.get("x:/bar")).toString + ) + // TODO + // assertEquals( + // "\\foo\\baz", + // Paths + // .get("/foo/bar") + // .resolveSibling(Paths.get("/foo/baz")) + // .toString, + // ) + assertEquals( + "x:\\foo\\baz", + Paths + .get("x:/foo/bar") + .resolveSibling(Paths.get("x:/foo/baz")) + .toString + ) + assertEquals( + "\\foo", + Paths.get("/").resolveSibling(Paths.get("/foo")).toString + ) + assertEquals( + "x:\\foo", + 
Paths.get("x:/").resolveSibling(Paths.get("x:/foo")).toString + ) + // TODO + // assertEquals( + // "\\bar" + // Paths + // .get("/foo/../bar") + // .resolveSibling(Paths.get("/bar")) + // .toString, + // ) + assertEquals( + "x:\\bar", + Paths + .get("x:/foo/../bar") + .resolveSibling(Paths.get("x:/bar")) + .toString + ) + } + + @Test def pathEquals(): Unit = { + assertTrue(Paths.get("") == Paths.get("")) + assertTrue(Paths.get("x:////") == Paths.get("x:\\")) + assertTrue(Paths.get("/.") != Paths.get("\\")) + assertTrue(Paths.get("x:/.") != Paths.get("x:\\")) + } +} diff --git a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/DefaultFormatterTest.scala b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/DefaultFormatterTest.scala index 913b5416b8..151661ea13 100644 --- a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/DefaultFormatterTest.scala +++ b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/DefaultFormatterTest.scala @@ -774,7 +774,7 @@ class DefaultFormatterTest { @Test def formatForFloatDoubleConversionType_sS_WithExcessPrecision() : Unit = { - val triple = Array( + val triple = Array[Array[Any]]( Array(1.1f, "%-6.4s", "1.1 "), Array(1.1f, "%.5s", "1.1"), Array(1.1d, "%-6.4s", "1.1 "), @@ -1058,7 +1058,7 @@ class DefaultFormatterTest { f.format("%#c", 'c'.asInstanceOf[Object]) ) - val triple = Array( + val triple = Array[Array[Any]]( Array('c', "%c", "c"), Array('c', "%-2c", "c "), Array('\u0123', "%c", "\u0123"), @@ -1112,7 +1112,7 @@ class DefaultFormatterTest { } @Test def formatForLegalByteShortIntegerLongConversionType_d(): Unit = { - val triple = Array( + val triple = Array[Array[Any]]( Array(0, "%d", "0"), Array(0, "%10d", " 0"), Array(0, "%-1d", "0"), @@ -1199,7 +1199,7 @@ class DefaultFormatterTest { } @Test def formatForLegalByteShortIntegerLongConversionType_o(): Unit = { - val triple = Array( + val triple = Array[Array[Any]]( Array(0, "%o", "0"), 
Array(0, "%-6o", "0 "), Array(0, "%08o", "00000000"), @@ -1258,7 +1258,7 @@ class DefaultFormatterTest { } @Test def formatForLegalByteShortIntegerLongConversionType_xX(): Unit = { - val triple = Array( + val triple = Array[Array[Any]]( Array(0, "%x", "0"), Array(0, "%-8x", "0 "), Array(0, "%06x", "000000"), @@ -1783,7 +1783,7 @@ class DefaultFormatterTest { } @Test def formatForFloatDoubleConversionType_eE(): Unit = { - val tripleE = Array( + val tripleE = Array[Array[Any]]( Array(0f, "%e", "0.000000e+00"), Array(0f, "%#.0e", "0.e+00"), Array(0f, "%#- (9.8e", " 0.00000000e+00"), @@ -1987,7 +1987,7 @@ class DefaultFormatterTest { } @Test def formatForFloatDoubleConversionType_gG(): Unit = { - val tripleG = Array( + val tripleG = Array[Array[Any]]( Array(1001f, "%g", "1001.00"), Array(1001f, "%- (,9.8g", " 1,001.0000"), Array(1001f, "%+0(,8.4g", "+001,001"), @@ -2224,7 +2224,7 @@ class DefaultFormatterTest { @Test def formatForFloatDoubleMaxValueConversionType_f(): Unit = { // These need a way to reproduce the same decimal representation of // extreme values as JVM. 
- val tripleF = Array( + val tripleF = Array[Array[Any]]( Array(-1234567890.012345678d, "% 0#(9.8f", "(1234567890.01234580)"), Array( java.lang.Double.MAX_VALUE, @@ -2312,7 +2312,7 @@ class DefaultFormatterTest { } @Test def formatForFloatDoubleConversionType_f(): Unit = { - val tripleF = Array( + val tripleF = Array[Array[Any]]( Array(0f, "%f", "0.000000"), Array(0f, "%#.3f", "0.000"), Array(0f, "%,5f", "0.000000"), @@ -2487,7 +2487,7 @@ class DefaultFormatterTest { Array(java.lang.Double.NEGATIVE_INFINITY, "%#+0(1.6f", "(Infinity)"), Array(java.lang.Double.NEGATIVE_INFINITY, "%-+(8.4f", "(Infinity)"), Array(java.lang.Double.NEGATIVE_INFINITY, "% 0#(9.8f", "(Infinity)") - ).asInstanceOf[Array[Array[Any]]] + ) val input: Int = 0 val pattern: Int = 1 val output: Int = 2 @@ -2535,7 +2535,7 @@ class DefaultFormatterTest { } @Test def formatForFloatDoubleConversionType_aA(): Unit = { - val tripleA = Array( + val tripleA = Array[Array[Any]]( Array(-0f, "%a", "-0x0.0p0"), Array(-0f, "%#.3a", "-0x0.000p0"), Array(-0f, "%5a", "-0x0.0p0"), diff --git a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/CollectorTest.scala b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/CollectorTest.scala new file mode 100644 index 0000000000..cd728165d6 --- /dev/null +++ b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/CollectorTest.scala @@ -0,0 +1,30 @@ +package org.scalanative.testsuite.javalib.util.stream + +import java.util.stream._ +import java.util.stream.Collector.Characteristics + +import org.junit.Test +import org.junit.Assert._ + +import org.scalanative.testsuite.utils.AssertThrows.assertThrows + +class CollectorTest { + @Test def collecterCharacteristicsEnum(): Unit = { + assertEquals("values", 3, Characteristics.values().size) + + assertEquals("CONCURRENT", 0, Characteristics.valueOf("CONCURRENT").ordinal) + + assertEquals("UNORDERED", 1, Characteristics.valueOf("UNORDERED").ordinal) + 
assertEquals( + "IDENTITY_FINISH", + 2, + Characteristics.valueOf("IDENTITY_FINISH").ordinal + ) + + assertThrows( + classOf[IllegalArgumentException], + Characteristics.valueOf("").ordinal + ) + } + +} diff --git a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/DoubleStreamTest.scala b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/DoubleStreamTest.scala new file mode 100644 index 0000000000..d41ca49f2c --- /dev/null +++ b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/DoubleStreamTest.scala @@ -0,0 +1,1138 @@ +package org.scalanative.testsuite.javalib.util.stream + +/* It is hard to assure oneself that the desired primitive DoubleStream, + * LongStream, & IntStream are being used instead of a/an (object) Stream. + * Create DoubleStream & kin using the methods in Arrays. + * + * Do not import ArrayList here, to guard against a Test populating + * an ArrayList and then inadvertently creating an (object) Stream with it. + * Use ju.ArrayList surgically at the points of use. + */ + +import java.{lang => jl} + +import java.{util => ju} +import java.util.Arrays +import java.util.concurrent.{CountDownLatch, TimeUnit} +import java.util.concurrent.CountDownLatch._ + +import java.util.{OptionalDouble, DoubleSummaryStatistics} +import java.util.function.{DoubleConsumer, DoubleFunction, DoubleSupplier} +import java.util.function.Supplier + +import java.util.stream._ + +import org.junit.Test +import org.junit.Assert._ +import org.junit.Ignore + +import org.scalanative.testsuite.utils.AssertThrows.assertThrows + +/* Design Note: + * Two tests requiring classes which have not been implemented yet + * are commented out: + * - doubleStreamMapToInt, required IntStream + * - doubleStreamMapToLong, requires LongStream + */ + +class DoubleStreamTest { + + final val epsilon = 0.00001 // tolerance for Floating point comparisons. 
+ + @Test def doubleStreamBuilderCanBuildAnEmptyStream(): Unit = { + val s = DoubleStream.builder().build() + val it = s.iterator() + assertFalse(it.hasNext()) + } + + @Test def doubleStreamEmptyIsEmpty(): Unit = { + val s = DoubleStream.empty() + val it = s.iterator() + assertFalse(it.hasNext()) + } + + @Test def doubleStreamOf_SingleElement(): Unit = { + val expected = 7.7 + val s = DoubleStream.of(expected) + val it = s.iterator() + assertTrue("DoubleStream should not be empty", it.hasNext()) + assertEquals("unexpected element", it.nextDouble(), expected, epsilon) + assertFalse("DoubleStream should be empty and is not.", it.hasNext()) + } + + @Test def doubleStreamOf_MultipleElements(): Unit = { + val s = DoubleStream.of(1.1, 2.2, 3.3) + val it = s.iterator() + assertEquals("element_1", 1.1, it.nextDouble(), epsilon) + assertEquals("element_2", 2.2, it.nextDouble(), epsilon) + assertEquals("element_3", 3.3, it.nextDouble(), epsilon) + assertFalse(it.hasNext()) + } + + @Test def doubleStreamFlatMapWorks(): Unit = { + val s = DoubleStream.of(1.1, 2.2, 3.3) + + val mapper = new DoubleFunction[DoubleStream] { + override def apply(v: Double): DoubleStream = + DoubleStream.of(v, v) + } + + val s2 = s.flatMap(mapper) + + val it = s2.iterator() + + assertEquals(1.1, it.nextDouble(), epsilon) + assertEquals(1.1, it.nextDouble(), epsilon) + + assertEquals(2.2, it.nextDouble(), epsilon) + assertEquals(2.2, it.nextDouble(), epsilon) + + assertEquals(3.3, it.nextDouble(), epsilon) + assertEquals(3.3, it.nextDouble(), epsilon) + + assertFalse(it.hasNext()) + } + + @Test def doubleStreamForEachWorks(): Unit = { + val s = DoubleStream.of(-1.1, -2.2, -3.3, 0.0) + + var sum = 0.0 + val doubleConsumer = new DoubleConsumer { + def accept(d: Double): Unit = sum += d + } + + s.forEach(doubleConsumer) + assertEquals(-6.6, sum, epsilon) + } + + @Test def doubleStreamFlatMapWorksTwice(): Unit = { + val s = DoubleStream.of(1.1, 2.2, 3.3) + + val mapper1 = new DoubleFunction[DoubleStream] 
{ + override def apply(v: Double): DoubleStream = + DoubleStream.of(v, v) + } + + val mapper2 = new DoubleFunction[DoubleStream] { + override def apply(v: Double): DoubleStream = + DoubleStream.of(-v, -v, -v) + } + + val s2 = s + .flatMap(mapper1) + .flatMap(mapper2) + +// format: off + val expected = + Seq( + -1.1, -1.1, -1.1, -1.1, -1.1, -1.1, + -2.2, -2.2, -2.2, -2.2, -2.2, -2.2, + -3.3, -3.3, -3.3, -3.3, -3.3, -3.3 + ) +// format: on + + val result = scala.collection.mutable.ArrayBuffer.empty[Double] + val it = s2.iterator() + + while (it.hasNext()) { + result += it.nextDouble() + } + + assertTrue(result == expected) + } + + @Test def doubleStreamOnCloseWorks(): Unit = { + var latch = new CountDownLatch(1) + + class Closer(cdLatch: CountDownLatch) extends Runnable { + override def run(): Unit = cdLatch.countDown() + } + + val s = DoubleStream.empty().onClose(new Closer(latch)) + s.close() + + val timeout = 30L + assertTrue( + s"close handler did not run within ${timeout} seconds", + latch.await(timeout, TimeUnit.SECONDS) + ) + } + +// Static methods ------------------------------------------------------- + + @Test def doubleStreamConcat(): Unit = { + val a = DoubleStream.of(9.9, 8.8, 6.6, 7.7, 5.5) + val b = DoubleStream.of(0.0, 3.3, 2.2) + + val s = DoubleStream.concat(a, b) + + val it = s.iterator() + assertNotNull("s.iterator() should not be NULL", it) + assertTrue("stream should not be empty", it.hasNext()) + + assertEquals(s"element", 9.9, it.nextDouble(), epsilon) + assertEquals(s"element", 8.8, it.nextDouble(), epsilon) + assertEquals(s"element", 6.6, it.nextDouble(), epsilon) + assertEquals(s"element", 7.7, it.nextDouble(), epsilon) + assertEquals(s"element", 5.5, it.nextDouble(), epsilon) + + assertEquals(s"element", 0.0, it.nextDouble(), epsilon) + assertEquals(s"element", 3.3, it.nextDouble(), epsilon) + assertEquals(s"element", 2.2, it.nextDouble(), epsilon) + + assertFalse("stream should be empty", it.hasNext()) + } + + @Test def 
doubleStreamGenerate(): Unit = { + val nElements = 5 + val data = new Array[Double](nElements) + data(0) = 0.0 + data(1) = 1.1 + data(2) = 2.2 + data(3) = 3.3 + data(4) = 4.4 + + val src = new DoubleSupplier() { + var count = -1 + + def getAsDouble(): Double = { + count += 1 + data(count % nElements) + } + } + + val s = DoubleStream.generate(src) + + val it = s.iterator() + + assertTrue("DoubleStream should not be empty", it.hasNext()) + + for (j <- 0 until nElements) + assertEquals(s"data(${j})", it.nextDouble(), data(j), epsilon) + + assertTrue("DoubleStream should not be empty", it.hasNext()) + } + + @Test def doubleStreamIterate_Unbounded(): Unit = { + val nElements = 4 + var count = -1.0 + + val expectedSeed = 3.14 + + val expected = Seq(expectedSeed, 4.24, 5.34, 6.44) + + val s = DoubleStream.iterate( + expectedSeed, + e => e + 1.1 + ) + + val it = s.iterator() + + assertTrue("DoubleStream should not be empty", it.hasNext()) + + for (j <- 0 until nElements) + assertEquals(s"element: ${j})", expected(j), it.nextDouble(), epsilon) + + assertTrue("DoubleStream should not be empty", it.hasNext()) + } + + @Test def doubleStreamOf_NoItems(): Unit = { + val s = DoubleStream.of() + + val it = s.iterator() + assertFalse("DoubleStream should be empty", it.hasNext()) + } + + @Test def doubleStreamOf_OneItem(): Unit = { + val expected = 6.67 + val s = DoubleStream.of(expected) + + val it = s.iterator() + assertTrue("stream should not be empty", it.hasNext()) + assertEquals(s"element", expected, it.nextDouble(), epsilon) + + assertFalse("DoubleStream should be empty", it.hasNext()) + } + + // DoubleStream.of() with more than two arguments is exercised in many other + // places in this file, so no Test for that case here. 
+ +// Instance methods ----------------------------------------------------- + + @Test def doubleStreamAllMatch_EmptyStream(): Unit = { + val s = DoubleStream.empty() + var predEvaluated = false + + val matched = s.allMatch((e) => { predEvaluated = true; true }) + assertTrue("unexpected match failure", matched) + assertFalse("predicate should not have been evaluated", predEvaluated) + } + + @Test def doubleStreamAllMatch_True(): Unit = { + + /* DoubleStream.allMatch() will return "true" on an empty stream. + * Try to distinguish that "true" from an actual all-elements-match "true" + * Since streams can not be re-used, count s0. If it is non-empty, assume + * its sibling s is also non-empty, distingishing the two "true"s. + */ + val s0 = DoubleStream.of(0.0, 1.1, 2.2, 3.3) + assertTrue("unexpected empty stream", s0.count > 0) + + val s = DoubleStream.of(0.0, 1.1, 2.2, 3.3) + + val matched = s.allMatch((e) => { (e >= 0.0) && (e < 10.0) }) + assertTrue("unexpected match failure", matched) + } + + @Test def doubleStreamAllMatch_False(): Unit = { + val s = DoubleStream.of(0.0, 1.1, 2.2, 3.3) + + val matched = s.allMatch((e) => e > 2.2) + assertFalse("unexpected match", matched) + } + + @Test def doubleStreamAnyMatch_EmptyStream(): Unit = { + val s = DoubleStream.empty() + var predEvaluated = false + + val matched = s.anyMatch((e) => { predEvaluated = true; true }) + assertFalse("unexpected match", matched) + assertFalse("predicate should not have been evaluated", predEvaluated) + } + + @Test def doubleStreamAnyMatch_True(): Unit = { + val s = DoubleStream.of(0.0, 1.1, 2.2, 3.3) + + val matched = s.anyMatch((e) => (e > 1.0) && (e < 2.0)) + assertTrue("unexpected predicate failure", matched) + } + + @Test def doubleStreamAnyMatch_False(): Unit = { + val s = DoubleStream.of(0.0, 1.1, 2.2, 3.3) + + val matched = s.anyMatch((e) => e > 10.0) + assertFalse("unexpected predicate failure", matched) + } + + @Test def doubleStreamAverage_EmptyStream(): Unit = { + val s = 
DoubleStream.empty() + + val optional = s.average() + + assertFalse(s"expected empty optional, got value", optional.isPresent()) + } + + @Test def doubleStreamAverage(): Unit = { + val nElements = 8 + + val wild = new Array[Double](nElements) // holds arbitrarily jumbled data + wild(0) = 132.45 + wild(1) = 4.21 + wild(2) = 2.11 + wild(3) = 55.31 + wild(4) = 16.68 + wild(5) = 77.3 + wild(6) = 44.61 + wild(7) = 60.9 + + val expectedAverage = 49.19625 + + val s = DoubleStream.of(wild: _*) + + val optional = s.average() + + assertTrue("unexpected empty optional", optional.isPresent()) + + assertEquals( + "unexpected average", + expectedAverage, + optional.getAsDouble(), + epsilon + ) + } + + @Test def doubleStreamBoxed(): Unit = { + val nElements = 5 + val data = new Array[Double](nElements) + data(0) = 0.0 + data(1) = 1.1 + data(2) = 2.2 + data(3) = 3.3 + data(4) = 4.4 + + val sd = Arrays.stream(data) + + assertTrue( + "stream should be a DoubleStream", + sd.isInstanceOf[DoubleStream] + ) + + val sBoxed = sd.boxed() + + assertTrue( + "resultant stream should be boxed Stream[Double]", + sBoxed.isInstanceOf[Stream[_]] + ) + + assertFalse( + "resultant stream should not be a DoubleStream", + sBoxed.isInstanceOf[DoubleStream] + ) + } + + @Test def doubleStreamCollect_EmptyStreamUsingSupplier(): Unit = { + type U = ju.ArrayList[Double] + + val s = DoubleStream.empty() + + val supplier = new Supplier[U]() { + def get(): U = new U + } + + val collected = s.collect( + supplier, + (list: U, e: Double) => list.add(e), + (list1: U, list2: U) => list1.addAll(list2) + ) + + // Proper size + assertEquals("list size", 0, collected.size()) + } + + @Test def doubleStreamCollect_UsingSupplier(): Unit = { + type U = ju.ArrayList[Double] + + val nElements = 5 + val data = new Array[Double](nElements) + data(0) = 0.0 + data(1) = 1.1 + data(2) = 2.2 + data(3) = 3.3 + data(4) = 4.4 + + val s = Arrays.stream(data) + + val supplier = new Supplier[U]() { + def get(): U = new U + } + + val 
collected = s.collect( + supplier, + (list: U, e: Double) => list.add(e), + (list1: U, list2: U) => list1.addAll(list2) + ) + + // Proper size + assertEquals("list size", nElements, collected.size()) + + // Proper elements, in encounter order + for (j <- 0 until nElements) + assertEquals("list element", data(j), collected.get(j), epsilon) + } + + @Test def doubleStreamCollect_UsingSummaryStatistics(): Unit = { + /* This is the example given at the top of the JVM + * DoubleSummaryStatistics description, translate to Scala. + * + * It tests DoubleStream.collect() using user-designated arguments. + * + * Along the way, it shows a succinct way of using collect() in Scala. + */ + + type U = DoubleSummaryStatistics + + val nElements = 6 + val expectedSum = 16.5 + val expectedMin = 0.0 + val expectedAverage = expectedSum / nElements + val expectedMax = 5.5 + + val data = new Array[Double](nElements) + data(0) = 1.1 + data(1) = 2.2 + data(2) = expectedMin + data(3) = 3.3 + data(4) = expectedMax + data(5) = 4.4 + + val s = Arrays.stream(data) + + val collected = s.collect( + () => new U, + (summary: U, e: Double) => summary.accept(e), + (summary1: U, summary2: U) => summary1.combine(summary2) + ) + + // Proper stats + assertEquals("count", nElements, collected.getCount()) + assertEquals("sum", expectedSum, collected.getSum(), epsilon) + assertEquals("min", expectedMin, collected.getMin(), epsilon) + assertEquals("average", expectedAverage, collected.getAverage(), epsilon) + assertEquals("max", expectedMax, collected.getMax(), epsilon) + } + + @Test def doubleStreamCount(): Unit = { + val expectedCount = 5 + + val s = DoubleStream.of(0.0, 1.1, 2.2, 3.3, 4.4) + + assertEquals(s"unexpected element count", expectedCount, s.count()) + } + + @Test def doubleStreamDistinct(): Unit = { + + // There must be a harder way of doing this setup. 
+ // Using " scala.jdk.CollectionConverters._" and futzing with it + // having a different name in Scala 2.12 might just be a greater + // time suck. + + val expectedCount = 5 + val range = 0 until expectedCount + + val expectedElements = new Array[Double](expectedCount) + for (j <- range) + expectedElements(j) = j * 2.0 + + val expectedSet = new ju.HashSet[Double]() + for (j <- range) + expectedSet.add(expectedElements(j)) + + val s = DoubleStream + .of(expectedElements: _*) + .flatMap((e) => DoubleStream.of(e, e, e)) + .distinct() + + assertEquals(s"unexpected count", expectedCount, s.count()) + + // Count is good, now did we get expected elements and only them? + + // count() exhausted s1, so create second stream, s2 + + val s2 = DoubleStream + .of(expectedElements: _*) + .flatMap((e) => DoubleStream.of(e, e, e)) + .distinct() + + s2.forEach((e) => { + val inSet = expectedSet.remove(e) + // Detect both unknown elements and + // occurrences of unwanted, non-distinct elements + assertTrue(s"element ${e} not in expectedSet", inSet) + }) + + // Iff the stream was proper & distinct, the expected set should be empty. 
+ assertTrue("expectedSet has remaining elements", expectedSet.isEmpty()) + } + + @Test def doubleStreamFindAny_Null(): Unit = { + val s = DoubleStream.of(null.asInstanceOf[Double]) + // Double nulls get seen as 0.0 + val optional = s.findAny() + assertTrue("unexpected failure to findAny", optional.isPresent()) + assertEquals("unexpected element", 0.0, optional.getAsDouble(), epsilon) + } + + @Test def doubleStreamFindAny_True(): Unit = { + val s = DoubleStream.of(0.0, 1.1, 2.2, 3.3) + val acceptableValues = List(0.0, 1.1, 2.2, 3.3) + + val optional = s.findAny() + + assertTrue("unexpected empty optional", optional.isPresent()) + + val found = optional.getAsDouble() + assertTrue( + s"unexpected value: '${found}'", + acceptableValues.contains(found) + ) + } + + @Test def doubleStreamFindAny_False(): Unit = { + val s = DoubleStream.empty() + + val optional = s.findAny() + + assertFalse("unexpected failure", optional.isPresent()) + } + + @Test def doubleStreamFindFirst_True(): Unit = { + val expectedFirst = 0.0 + val s = DoubleStream.of(expectedFirst, 1.1, 2.2, 3.3) + + val optional = s.findFirst() + + assertTrue("unexpected empty optional", optional.isPresent()) + assertEquals( + "unexpected mismatch", + expectedFirst, + optional.getAsDouble(), + epsilon + ) + } + + @Test def doubleStreamFindFirst_False(): Unit = { + val s = DoubleStream.empty() + + val optional = s.findFirst() + + assertFalse("unexpected failure", optional.isPresent()) + } + + @Test def doubleStreamFilter(): Unit = { + val expectedCount = 4 + + val s0 = DoubleStream.of( + 101.1, 1.1, 102.2, 2.2, 103.2, 3.3, 4.4 + ) + + val s1 = s0.filter(e => e < 100.0) + assertEquals(s"unexpected element count", expectedCount, s1.count()) + } + + @Test def doubleStreamForeachOrdered(): Unit = { + val s = DoubleStream.of(1.1, 2.2, 3.3) + + var sum = 0.0 + val consumer = new DoubleConsumer { + def accept(i: Double): Unit = { sum = sum + i } + } + s.forEachOrdered(consumer) + assertEquals("unexpected sum", 6.6, sum, 
epsilon) + } + + @Test def doubleStreamLimit_NegativeArg(): Unit = { + val s = DoubleStream.of(1.1, 2.2, 3.3) + assertThrows(classOf[IllegalArgumentException], s.limit(-1)) + } + + @Test def doubleStreamLimit(): Unit = { + val expectedCount = 10 + var data = -1 + + val s0 = DoubleStream.iterate( + 1.61803, + e => e + 1.0 + ) + + val s1 = s0.limit(expectedCount) + + assertEquals(s"unexpected element count", expectedCount, s1.count()) + } + + @Test def doubleStreamMap(): Unit = { + val nElements = 4 + val prefix = "mapped_" + var count = 0 + + val s0 = DoubleStream.of(1.1, 2.2, 3.3, 4.4) + + val s1 = s0.map((e) => { + count += 1 + s"${prefix}${e}" + e * 10 + }) + + /* Check that the right number of elements, each with the expected form, + * are generated. + * + * "map()" is an intermediate, not terminal operation. + * Do the terminal "forEach()" first, to force the counting in the mapper. + */ + s1.forEach((e) => + assertTrue( + s"unexpected map element: ${e}", + (e > 10.0) && (e < 45.0) + ) + ) + assertEquals("unexpected count", nElements, count) + } + + /* // Not Yet Implemented, needs IntStream first + @Test def doubleStreamMapToInt(): Unit = { + val nElements = 4 + var count = 0 + + val s0 = DoubleStream.of(1.1, 2.2, 3.3, 4.4) + + val s1 = s0.mapToInt((e) => e.toInt) + + // Right resultant types + s1.forEach(e => + count += 1 + assertEquals (s"unexpected type", + classOf[Int], + e.getClass()) + ) + + // Right count + assertEquals("unexpected count", nElements, count) + + // Right content + val s2 = DoubleStream.of(1.1, 2.2, 3.3, 4.4) + + val s3 = s2.mapToInt((e) => e.toInt) + + val it = s3.iterator() + + for (j <- 1 to nElements) + assertEquals("unexpected element", j, it.nextDouble()) + } + */ // Not Yet Implemented + + /* // Not Yet Implemented, needs LongStream first + @Test def doubleStreamMapToLong: Unit = { + val nElements = 4 + var count = 0 + + val s0 = DoubleStream.of(1.1, 2.2, 3.3, 4.4) + + /val s1 = s0.mapToLong((e) => e.toLong) + + // Right resultant 
types + s1.forEach(e => + count += 1 + assertEquals (s"unexpected type", + classOf[Long], + e.getClass()) + ) + + // Right count + assertEquals("unexpected count", nElements, count) + + // Right content + val s2 = DoubleStream.of(1.1, 2.2, 3.3, 4.4) + + val s3 = s2.mapToLong((e) => e.toLong) + + val it = s3.iterator() + + for (j <- 1 to nElements) + assertEquals("unexpected element", j.toLong, it.nextDouble()) + } + */ // Not Yet Implemented + + @Test def doubleStreamMapToObj(): Unit = { + val nElements = 4 + val prefix = "mapped_" + var count = 0 + + val s0 = DoubleStream.of(1.1, 2.2, 3.3, 4.4) + + val s1 = s0.mapToObj[String]((e) => { + count += 1 + s"${prefix}${e}" + }) + + /* Check that the right number of elements, each with the expected form, + * are generated. + * + * "mapToObj()" is an intermediate, not terminal operation. + * Do the terminal "forEach()" first, to force the counting in the mapper. + */ + + s1.forEach((e) => + /* Type check logic: + * The compiler expects the resultant element type to be String + * or else it would not allow the "startsWith()" below. + * Simlarly, if the runtime type is not String, that call would + * error. A pretty convincing case for having Strings here. 
+ */ + + assertTrue( + s"unexpected map element: ${e}", + e.startsWith(prefix) + ) + ) + assertEquals("unexpected count", nElements, count) + } + + @Test def doubleStreamNoneMatch_EmptyStream(): Unit = { + val s = DoubleStream.empty() + var predEvaluated = false + + val noneMatched = s.noneMatch((e) => { predEvaluated = true; true }) + assertTrue("unexpected match", noneMatched) + assertFalse("predicate should not have been evaluated", predEvaluated) + } + + @Test def doubleStreamNoneMatch_True(): Unit = { + val s = DoubleStream.of(0.0, 1.1, 2.2, 3.3) + + val matched = s.noneMatch((e) => e < 0.0) + assertTrue("unexpected predicate failure", matched) + } + + @Test def doubleStreamNone_MatchFalse(): Unit = { + val s = DoubleStream.of(0.0, 1.1, 2.2, 3.3) + + val matched = s.noneMatch((e) => e > 2.2) + assertFalse("unexpected predicate failure", matched) + } + + @Test def doubleStreamMax_EmptyStream(): Unit = { + val s = DoubleStream.empty() + + val max = s.max() + + assertFalse("max optional should be empty", max.isPresent) + } + + @Test def doubleStreamMax(): Unit = { + val stream = DoubleStream.of(85.85, 4.4, 87.87, 25.25, 7.7) + + val maxOpt = stream.max() + + assertTrue("max not found", maxOpt.isPresent()) + + assertEquals( + "wrong max item found", + 87.87, + maxOpt.getAsDouble(), + epsilon + ) + } + + @Test def doubleStreamMax_NaN(): Unit = { + val stream = DoubleStream.of(85.85, Double.NaN, 87.87, 25.25, 7.7) + + val maxOpt = stream.max() + + assertTrue("max not found", maxOpt.isPresent()) + + assertEquals( + "wrong max item found", + Double.NaN, + maxOpt.getAsDouble(), + epsilon + ) + } + + @Test def doubleStreamMax_NegativeZero(): Unit = { + val stream = DoubleStream.of(-85.85, -0.0, -87.87, -25.25, -7.7) + + val maxOpt = stream.max() + + assertTrue("max not found", maxOpt.isPresent()) + + /* This Test expects a -0.0, exactly, not a -0.0 squashed to 0.0. + * ==, <, and > will conflate -0.0 and 0.0: i.e. -0.0 == 0.0. 
+ * Double.compare will distinguish them: i.e. -0.0 != 0.0. + */ + assertEquals( + s"wrong max item found: '${maxOpt.getAsDouble()}'", + 0, + jl.Double.compare(-0.0, maxOpt.getAsDouble()) // distinguish -0.0 + ) + } + + @Test def doubleStreamMin_EmptyStream(): Unit = { + val s = DoubleStream.empty() + + val minOpt = s.min() + + assertFalse("min optional should be empty", minOpt.isPresent) + } + + @Test def doubleStreamMin(): Unit = { + val stream = DoubleStream.of(85.85, 4.4, 87.87, 25.25, 7.7) + + val minOpt = stream.min() + + assertTrue("min not found", minOpt.isPresent()) + + assertEquals( + "wrong min item found", + 4.4, + minOpt.getAsDouble(), + epsilon + ) + } + + @Test def doubleStreamMin_NaN(): Unit = { + val stream = DoubleStream.of(85.85, Double.NaN, 87.87, 25.25, 7.7) + + val minOpt = stream.min() + + assertTrue("min not found", minOpt.isPresent()) + + assertEquals( + "wrong min item found", + Double.NaN, + minOpt.getAsDouble(), + epsilon + ) + } + + @Test def doubleStreamMin_NegativeZero(): Unit = { + val stream = DoubleStream.of(85.85, -0.0, 87.87, 0.0, 25.25, 7.7) + + val minOpt = stream.min() + + assertTrue("min not found", minOpt.isPresent()) + + /* This Test expects a -0.0, exactly, not a -0.0 squashed to 0.0. + * ==, <, and > will conflate -0.0 and 0.0: i.e. -0.0 == 0.0. + * Double.compare will distinguish them: i.e. -0.0 != 0.0. + */ + assertEquals( + s"wrong min item found: '${minOpt.getAsDouble()}'", + 0, + jl.Double.compare(-0.0, minOpt.getAsDouble()) // distinguish -0.0 + ) + } + + /* @Ignore this test and leave it in place. The results are better evaluated + * visually/manually rather than automatically. + * JVM documentations suggests that "peek()" be mainly used for debugging. + */ + @Ignore + @Test def doubleStreamPeek(): Unit = { + val expectedCount = 3 + + val s = Stream.of("Animal", "Vegetable", "Mineral") + + // The ".count()" is a terminal operation to force the pipeline to + // evalute. 
The real interest is if the peek() side-effect happened + // correctly. Currently that can only be evaluated manually/visually. + val n = s.peek((e) => printf(s"peek: |${e}||\n")).count() + + assertEquals(s"unexpected count", expectedCount, n) + } + + @Ignore // see @Ignore comment above "streamShouldPeek()" above. + @Test def doubleStreamPeek_CompositeStream(): Unit = { + // Test that peek() works with all substreams of a composite stream. + val expectedCount = 10 + + // See ".count()" comment in streamShouldPeek above. + + // One should see the original data before and then after transformation + // done by flatmap to each original element. Something like: + // before: <1> + // after: <1> + // before: <2> + // after: <1> + // after: <2> + // before: <3> + // after: <1> + // after: <2> + // after: <3> + // before: <4> + // after: <1> + // after: <2> + // after: <3> + // after: <4> + + val n = Stream + .of(1, 2, 3, 4) + .peek((e) => printf(s"composite peek - before: <${e}>|\n")) // simple str + .flatMap((e) => Stream.of((1 to e): _*)) + .peek((e) => printf(s"composite peek - after: <${e}>|\n")) // composite + .count() + + assertEquals(s"unexpected count", expectedCount, n) + } + + @Test def doubleStreamReduce_OneArgEmpty(): Unit = { + val s = DoubleStream.empty() + + val optional: OptionalDouble = s.reduce((r, e) => r + e) + + assertFalse("unexpected non-empty optional", optional.isPresent()) + } + + @Test def doubleStreamReduce_OneArg(): Unit = { + val s = DoubleStream.of(3.3, 5.5, 7.7, 11.11) + val expectedSum = 27.61 + + val optional: OptionalDouble = s.reduce((r, e) => r + e) + + assertTrue("unexpected empty optional", optional.isPresent()) + assertEquals( + "unexpected reduction result", + expectedSum, + optional.getAsDouble(), + epsilon + ) + } + + @Test def doubleStreamReduce_TwoArgEmpty(): Unit = { + val s = DoubleStream.empty() + + val firstArg = 1.1 + + val product: Double = s.reduce(firstArg, (r, e) => r * e) + + assertEquals("unexpected reduction 
result", firstArg, product, epsilon) + } + + @Test def doubleStreamReduce_TwoArg(): Unit = { + val s = DoubleStream.of(3.3, 5.5, 7.7, 11.11) + val expectedProduct = 1552.67805 + + val product: Double = s.reduce(1, (r, e) => r * e) + + assertEquals( + "unexpected reduction result", + expectedProduct, + product, + epsilon + ) + } + + @Test def doubleStreamSkip_NegativeArg(): Unit = { + val s = DoubleStream.of(1.1, 2.2, 3.3) + assertThrows(classOf[IllegalArgumentException], s.skip(-1)) + } + + @Test def doubleStreamSkip_TooMany(): Unit = { + val s = DoubleStream.of(1.1, 2.2, 3.3) + + val isEmptyStream = !s.skip(10).iterator.hasNext() + assertTrue("expected empty stream", isEmptyStream) + } + + @Test def doubleStreamSkip(): Unit = { + val expectedValue = 99.99 + val s = DoubleStream.of(1.1, 2.2, 3.3, 4.4, expectedValue, 6.6, 7.7) + + val iter = s.skip(4).iterator() + + assertTrue("expected non-empty stream", iter.hasNext()) + assertEquals( + "unexpected first value: ", + expectedValue, + iter.nextDouble(), + epsilon + ) + } + + @Test def doubleStreamSorted(): Unit = { + val nElements = 8 + val wild = new Array[Double](nElements) + + // Ensure that the Elements are not inserted in sorted or reverse order. 
+ wild(0) = 45.32 + wild(1) = 21.4 + wild(2) = 11.2 + wild(3) = 31.5 + wild(4) = 68.16 + wild(5) = 3.77 + wild(6) = 61.44 + wild(7) = 9.60 + + val ordered = new Array[Double](nElements) + ordered(0) = 3.77 + ordered(1) = 9.60 + ordered(2) = 11.2 + ordered(3) = 21.4 + ordered(4) = 31.5 + ordered(5) = 45.32 + ordered(6) = 61.44 + ordered(7) = 68.16 + + val s = DoubleStream.of(wild: _*) + + val sorted = s.sorted() + + var count = 0 + + sorted.forEachOrdered((e) => { + assertEquals("mismatched elements", ordered(count), e, epsilon) + count += 1 + }) + + val msg = + if (count == 0) "unexpected empty stream" + else "unexpected number of elements" + + assertEquals(msg, nElements, count) + } + + @Test def doubleStreamSum(): Unit = { + val nElements = 9 + + val wild = new Array[Double](nElements) // holds arbitrarily jumbled data + wild(0) = 45.32 + wild(1) = 21.4 + wild(2) = 11.2 + wild(3) = 31.5 + wild(4) = 68.16 + wild(5) = 3.77 + wild(6) = 61.44 + wild(7) = 9.60 + + val expectedSum = 252.39 + + val s = DoubleStream.of(wild: _*) + + val sum = s.sum() + + assertEquals("unexpected sum", expectedSum, sum, epsilon) + } + + @Test def doubleStreamSummaryStatistics(): Unit = { + val nElements = 8 + + val wild = new Array[Double](nElements) // holds arbitrarily jumbled data + wild(0) = 45.32 + wild(1) = 21.4 + wild(2) = 11.2 + wild(3) = 31.5 + wild(4) = 68.16 + wild(5) = 3.77 + wild(6) = 61.44 + wild(7) = 9.60 + + val expectedAverage = 31.54875 + val expectedCount = nElements + val expectedMax = 68.16 + val expectedMin = 3.77 + val expectedSum = 252.39 + + val s = DoubleStream.of(wild: _*) + + val stats = s.summaryStatistics() + + assertEquals( + "unexpected average", + expectedAverage, + stats.getAverage(), + epsilon + ) + + assertEquals("unexpected count", expectedCount, stats.getCount()) + + assertEquals("unexpected max", expectedMax, stats.getMax(), epsilon) + + assertEquals("unexpected min", expectedMin, stats.getMin(), epsilon) + + assertEquals("unexpected sum", 
expectedSum, stats.getSum(), epsilon) + } + + @Test def doubleStreamToArray(): Unit = { + val nElements = 9 + + val wild = new Array[Double](nElements) // holds arbitrarily jumbled data + wild(0) = 45.32 + wild(1) = 21.4 + wild(2) = 11.2 + wild(3) = 31.5 + wild(4) = 68.16 + wild(5) = 3.77 + wild(6) = 61.44 + wild(7) = 9.60 + + val s = DoubleStream.of(wild: _*) + + val resultantArray = s.toArray() + + // Proper size + assertEquals("result size", nElements, resultantArray.size) + + // Proper elements, in encounter order + for (j <- 0 until nElements) + assertEquals("elements do not match", wild(j), resultantArray(j), epsilon) + } + +} diff --git a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/StreamTest.scala b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/StreamTest.scala index 3b84812552..c7414bccda 100644 --- a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/StreamTest.scala +++ b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/javalib/util/stream/StreamTest.scala @@ -1,12 +1,51 @@ package org.scalanative.testsuite.javalib.util.stream +import java.{util => ju} +import java.util._ + +import java.util.concurrent.{CountDownLatch, TimeUnit} +import java.util.concurrent.CountDownLatch._ + +import java.util.function._ + +import java.util.{stream => jus} import java.util.stream._ -import java.util.function.{Consumer, Function} import org.junit.Test import org.junit.Assert._ +import org.junit.Ignore + +import org.scalanative.testsuite.utils.AssertThrows.assertThrows class StreamTest { + /* Design Note: + * Scala 2.12 requires the type in many lamba expressions: + * (e: String) => { body } + * + * This may look excessive and unnecessary to those used to Scala 2.13 + * and Scala 3. + * + * Taking a hit on a few style points allows this one file to run + * on the full range of version supported by Scala Native. 
'Tis + * a pity that it reduces its utility as a model for the full power of + * streams. + */ + + private def streamOfSingleton[T](single: T): Stream[T] = { + /* Scala Native Tests must support a range of Scala Versions, currently: + * 2.12.13 to 3.2.2 (soon to be 3.3.0). + * Scala 2.13.* and 3.* can distinguish between singleton and varargs + * overloads of Stream.of(), allows the use of the simpler: + * val s = Stream.of(expected) + * + * This tour of Robin Hood's barn allows Scala 2.12 Tests to run + * without even more complication. + */ + val al = new ArrayList[T](1) + al.add(single) + al.stream() + } + @Test def streamBuilderCanBuildAnEmptyStream(): Unit = { val s = Stream.builder().build() val it = s.iterator() @@ -19,12 +58,22 @@ class StreamTest { assertFalse(it.hasNext()) } - @Test def streamOfCanPutElementsInStream(): Unit = { + @Test def streamOf_SingleElement(): Unit = { + val expected = 7 + + val s = streamOfSingleton[Int](expected) + val it = s.iterator() + assertTrue("stream should not be empty", it.hasNext()) + assertEquals("unexpected element", it.next(), expected) + assertFalse("stream should be empty and is not.", it.hasNext()) + } + + @Test def streamOf_MultipleIntElements(): Unit = { val s = Stream.of(1, 2, 3) val it = s.iterator() - assertTrue(it.next() == 1) - assertTrue(it.next() == 2) - assertTrue(it.next() == 3) + assertEquals("element_1", 1, it.next()) + assertEquals("element_2", 2, it.next()) + assertEquals("element_3", 3, it.next()) assertFalse(it.hasNext()) } @@ -46,7 +95,7 @@ class StreamTest { assertFalse(it.hasNext()) } - @Test def streamForeachWorks(): Unit = { + @Test def streamForEachWorks(): Unit = { val s = Stream.of(1, 2, 3) var sum = 0 val consumer = new Consumer[Int] { @@ -78,11 +127,969 @@ class StreamTest { } @Test def streamOnCloseWorks(): Unit = { - var success = false - val handler = new Runnable { override def run(): Unit = success = true } - val s = Stream.empty[Int]().onClose(handler) - assertFalse(success) + var 
latch = new CountDownLatch(1)
+
+    class Closer(cdLatch: CountDownLatch) extends Runnable {
+      override def run(): Unit = cdLatch.countDown()
+    }
+
+    val s = Stream.empty[Int]().onClose(new Closer(latch))
     s.close()
-    assertTrue(success)
+
+    val timeout = 30L
+    assertTrue(
+      s"close handler did not run within ${timeout} seconds",
+      latch.await(timeout, TimeUnit.SECONDS)
+    )
+  }
+
+// Static methods -------------------------------------------------------
+
+  @Test def streamConcat(): Unit = {
+    val a = Stream.of("Q", "R", "X", "Y", "Z")
+    val b = Stream.of("A", "B", "C")
+
+    val s = Stream.concat(a, b)
+
+    val it = s.iterator()
+    assertNotNull("s.iterator() should not be NULL", it)
+
+    assertTrue("stream should not be empty", it.hasNext())
+
+    assertEquals(s"element", "Q", it.next())
+    assertEquals(s"element", "R", it.next())
+    assertEquals(s"element", "X", it.next())
+    assertEquals(s"element", "Y", it.next())
+    assertEquals(s"element", "Z", it.next())
+
+    assertEquals(s"element", "A", it.next())
+    assertEquals(s"element", "B", it.next())
+    assertEquals(s"element", "C", it.next())
+
+    assertFalse("stream should be empty", it.hasNext())
+  }
+
+  @Test def streamGenerate(): Unit = {
+    val nElements = 5
+    val data = new ArrayList[String](nElements)
+    data.add("Zero")
+    data.add("One")
+    data.add("Two")
+    data.add("Three")
+    data.add("Four")
+
+    val src = new Supplier[String]() {
+      type T = String
+      var count = -1
+
+      def get(): T = {
+        count += 1
+        data.get(count % nElements)
+      }
+    }
+
+    val s = Stream.generate(src)
+
+    val it = s.iterator()
+
+    assertTrue("stream should not be empty", it.hasNext())
+
+    for (j <- 0 until nElements)
+      assertTrue(s"data(${j})", it.next() == data.get(j))
+
+    assertTrue("stream should not be empty", it.hasNext())
+  }
+
+  @Test def streamIterate_Unbounded(): Unit = {
+    val nElements = 4
+    var count = -1
+
+    val expectedSeed = "Woody Woodpecker"
+    val s = Stream.iterate[String](
+      expectedSeed,
+      (e: String) => {
+        count += 1
+        count.toString()
+      
} + ) + + val it = s.iterator() + + assertTrue("stream should not be empty", it.hasNext()) + + assertEquals("seed", expectedSeed, it.next()) + + for (j <- 0 until nElements) + assertEquals(s"element: ${j})", String.valueOf(j), it.next()) + + assertTrue("stream should not be empty", it.hasNext()) + } + + @Test def streamOf_NoItems(): Unit = { + val s = Stream.of() + + val it = s.iterator() + assertFalse("stream should be empty", it.hasNext()) + } + + @Test def streamOf_OneItem(): Unit = { + val expectedString = "Only" + + val s = streamOfSingleton[String](expectedString) + + val it = s.iterator() + assertTrue("stream should not be empty", it.hasNext()) + assertEquals(s"element", expectedString, it.next()) + + assertFalse("stream should be empty", it.hasNext()) + } + + // During development, sometimes two elements were taken to be 1 Tuple2 + // Guard against regression. + @Test def streamOf_TwoItems(): Unit = { + val expectedString_1 = "RedSox" + val expectedString_2 = "Lightening" + val s = Stream.of(expectedString_1, expectedString_2) + + val it = s.iterator() + assertTrue("stream should not be empty", it.hasNext()) + assertEquals(s"element_1", expectedString_1, it.next()) + assertEquals(s"element_2", expectedString_2, it.next()) + + assertFalse("stream should be empty", it.hasNext()) + } + + // Stream.of() with more than two arguments is exercised in many other + // places in this file, so no Test for that case here. + +// Instance methods ----------------------------------------------------- + + @Test def streamAllMatch_EmptyStream(): Unit = { + val s = Stream.empty[String] + var predEvaluated = false + + val matched = s.allMatch((e) => { predEvaluated = true; true }) + assertTrue("unexpected match failure", matched) + assertFalse("predicate should not have been evaluated", predEvaluated) + } + + @Test def streamAllMatch_True(): Unit = { + + /* stream.allMatch() will return "true" on an empty stream. 
+ * Try to distinguish that "true" from an actual all-elements-match "true" + * Since streams can not be re-used, count s0. If it is non-empty, assume + * its sibling s is also non-empty, distingishing the two "true"s. + */ + val s0 = Stream.of("Air", "Earth", "Fire", "Water") + assertTrue("unexpected empty stream", s0.count > 0) + + val s = Stream.of("Air", "Earth", "Fire", "Water") + + val matched = s.allMatch((e) => { e.contains("a") || e.contains("i") }) + assertTrue("unexpected match failure", matched) + } + + @Test def streamAllMatch_False(): Unit = { + val s = Stream.of("Air", "Earth", "Fire", "Water") + + val matched = s.allMatch((e) => e.contains("a")) + assertFalse("unexpected match", matched) + } + + @Test def streamAnyMatch_EmptyStream(): Unit = { + val s = Stream.empty[String] + var predEvaluated = false + + val matched = s.anyMatch((e) => { predEvaluated = true; true }) + assertFalse("unexpected match", matched) + assertFalse("predicate should not have been evaluated", predEvaluated) + } + + @Test def streamAnyMatch_True(): Unit = { + val s = Stream.of("Air", "Earth", "Fire", "Water") + + val matched = s.anyMatch((e) => e.contains("a")) + assertTrue("unexpected predicate failure", matched) + } + + @Test def streamAnyMatch_False(): Unit = { + val s = Stream.of("Air", "Earth", "Fire", "Water") + + val matched = s.anyMatch((e) => e.contains("X")) + assertFalse("unexpected predicate failure", matched) + } + + @Test def streamCollect_EmptyStreamUsingCollector(): Unit = { + val sisters = new ArrayList[String](0) + + val s = sisters.stream() + + val collected = s.collect(Collectors.toList()) + + // Proper size (empty) + assertEquals("list size", 0, collected.size()) + } + + @Test def streamCollect_UsingCollector(): Unit = { + val nElements = 7 + val sisters = new ArrayList[String](nElements) + sisters.add("Maya") + sisters.add("Electra") + sisters.add("Taygete") + sisters.add("Alcyone") + sisters.add("Celaeno") + sisters.add("Sterope") + 
sisters.add("Merope") + + val s = sisters.stream() + + val collected = s.collect(Collectors.toList()) + + // Proper size + assertEquals("list size", nElements, collected.size()) + + // Proper elements, in encounter order + for (j <- 0 until nElements) + assertEquals("list element", sisters.get(j), collected.get(j)) + } + + @Test def streamCollect_EmptyStreamUsingSupplier(): Unit = { + type U = ArrayList[String] + + val sisters = new U(0) + + val s = sisters.stream() + + val supplier = new Supplier[U]() { + def get(): U = new U() + } + + val collected = s.collect( + supplier, + (list: U, e: String) => list.add(e), + (list1: U, list2: U) => list1.addAll(list2) + ) + + // Proper size + assertEquals("list size", 0, collected.size()) + } + + @Test def streamCollect_UsingSupplier(): Unit = { + type U = ArrayList[String] + + val nElements = 7 + val sisters = new U(nElements) + sisters.add("Phaisyle") + sisters.add("Coronis") + sisters.add("Cleeia") + sisters.add("Phaeo") + sisters.add("Eudora") + sisters.add("Ambrosia") + sisters.add("Dione") + + val s = sisters.stream() + + val supplier = new Supplier[U]() { + def get(): U = new U() + } + + val collected = s.collect( + supplier, + (list: U, e: String) => list.add(e), + (list1: U, list2: U) => list1.addAll(list2) + ) + + // Proper size + assertEquals("list size", nElements, collected.size()) + + // Proper elements, in encounter order + for (j <- 0 until nElements) + assertEquals("list element", sisters.get(j), collected.get(j)) + } + + @Test def streamCount(): Unit = { + val expectedCount = 4 + + val s = jus.Stream.of[String]("A", "B", "C", "D") + + assertEquals(s"unexpected element count", expectedCount, s.count()) + } + + @Test def streamCount_compositeStream(): Unit = { + // Test that count() works with all substreams of a composite stream. 
+ val expectedCount = 15 + + val n = Stream + .of(1, 2, 3, 4, 5) + .flatMap((e) => Stream.of((1 to e): _*)) + .count() + + assertEquals(s"unexpected count", expectedCount, n) + } + + @Test def streamDistinct(): Unit = { + val expectedCount = 5 + val range = 0 until expectedCount + + val expectedElements = Array.ofDim[Int](expectedCount) + for (j <- range) + expectedElements(j) = j + 1 + + val expectedSet = new ju.HashSet[Int]() + for (j <- range) + expectedSet.add(expectedElements(j)) + + val s = jus.Stream + .of(expectedElements: _*) + .flatMap((e) => Stream.of((1 to e): _*)) + .distinct() + + assertEquals(s"unexpected count", expectedCount, s.count()) + + // Count is good, now did we get expected elements and only them? + + val s2 = jus.Stream + .of(expectedElements: _*) + .flatMap((e) => Stream.of((1 to e): _*)) + .distinct() + + s2.forEach((e) => { + val inSet = expectedSet.remove(e) + // Detect both unknown elements and + // occurances of unwanted, non-distinct elements + assertTrue(s"element ${e} not in expectedSet", inSet) + }) + + // Iff the stream was proper & distinct, the expected set should be empty. 
+ assertTrue("expectedSet has remaining elements", expectedSet.isEmpty()) + } + + @Test def streamFindAny_Null(): Unit = { + val s = Stream.of(null.asInstanceOf[String], "NULL") + assertThrows(classOf[NullPointerException], s.findAny()) + } + + @Test def streamFindAny_True(): Unit = { + val s = Stream.of("Air", "Earth", "Fire", "Water") + val acceptableValues = Arrays.asList("Air", "Earth", "Fire", "Water") + + val optional = s.findAny() + + assertTrue("unexpected empty optional", optional.isPresent()) + + val found = optional.get() + assertTrue( + s"unexpected value: '${found}'", + acceptableValues.contains(found) + ) + } + + @Test def streamFindAny_False(): Unit = { + val s = Stream.empty[String]() + + val optional = s.findAny() + + assertFalse("unexpected failure", optional.isPresent()) + } + + @Test def streamFindFirst_Null(): Unit = { + val s = Stream.of(null.asInstanceOf[String], "NULL") + assertThrows(classOf[NullPointerException], s.findFirst()) + } + + @Test def streamFindFirst_True(): Unit = { + val expectedFirst = "Air" + val s = Stream.of(expectedFirst, "Earth", "Fire", "Water") + + val optional = s.findFirst() + + assertTrue("unexpected empty optional", optional.isPresent()) + assertEquals("unexpected mismatch", expectedFirst, optional.get()) + } + + @Test def streamFindFirst_False(): Unit = { + val s = Stream.empty[String]() + + val optional = s.findFirst() + + assertFalse("unexpected failure", optional.isPresent()) + } + + @Test def streamFilter(): Unit = { + val expectedCount = 4 + + val s0 = jus.Stream.of[String]("AA", "B", "CC", "D", "EE", "F", "G") + + val s1 = s0.filter((e) => e.length() == 1) + assertEquals(s"unexpected element count", expectedCount, s1.count()) + } + + @Test def streamFlatMapToDouble(): Unit = { + val expectedSum = 4.5 + + val s = jus.Stream.of[String]("AA", "B", "CC", "D", "EE", "F") + + /* Chose the items in S and the mapper function to yield an obviously + * floating point sum, not something that could be an Int implicitly 
+ * converted to Double. + * Let the compiler distinguish Double as Object and Double + * as primitive. Only DoubleStream will have the sum method. + */ + + val sum = s.flatMapToDouble(e => DoubleStream.of(0.5 * e.length())).sum() + + assertEquals(s"unexpected sum", expectedSum, sum, 0.00001) } + + @Test def streamFlatMapToInt(): Unit = { + // Stream#flatMapToInt is Not Yet Implemented + } + + @Test def streamFlatMapToLong(): Unit = { + // Stream#flatMapToLong is Not Yet Implemented + } + + @Test def streamForeachOrdered(): Unit = { + val s = Stream.of(1, 2, 3, 4) + var sum = 0 + val consumer = new Consumer[Int] { + def accept(i: Int): Unit = sum += i + } + s.forEachOrdered(consumer) + assertEquals(10, sum) + } + + @Test def streamLimit_NegativeArg(): Unit = { + val s = Stream.of("X", "Y", "Z") + assertThrows(classOf[IllegalArgumentException], s.limit(-1)) + } + + @Test def streamLimit(): Unit = { + val expectedCount = 10 + var data = -1 + + val s0 = Stream.iterate[String]( + "seed", + (e: String) => { + data += 1 + data.toString() + } + ) + + val s1 = s0.limit(expectedCount) + + assertEquals(s"unexpected element count", expectedCount, s1.count()) + } + + @Test def streamMap(): Unit = { + val nElements = 4 + val prefix = "mapped_" + var count = 0 + + val s0 = jus.Stream.of[String]("A", "B", "C", "D") + + /* Scala 2.12 needs the ": Stream[String]" type ascription so it uses + * the proper Consumer type. + * Scala 2.13.* & 3.* do not need it (and it causes minimal harm). + */ + val s1: Stream[String] = s0.map((e: String) => { + count += 1 + s"${prefix}${e}" + }) + + /* Check that the right number of elements, each with the expected form, + * are generated. + * + * "map()" is an intermediate, not terminal operation. + * Do the terminal "forEach()" first, to force the counting in the mapper. 
+ */ + + s1.forEach((e: String) => + assertTrue( + s"unexpected map element: ${e}", + e.startsWith(prefix) && + (e.endsWith("_A") || + e.endsWith("_B") || + e.endsWith("_C") || + e.endsWith("_D")) + ) + ) + + assertEquals("map has unexpected count", nElements, count) + } + + /* The mapMulti clade was introduce in Java 16, find Tests in + * StreamTestOnJDK16.scala + */ + + @Test def streamMapToDouble(): Unit = { + val expectedSum = 28.26 + + val s = jus.Stream.of[String]("AA", "B", "CC", "D", "EE", "F") + + /* Chose the items in S and the mapper function to yield an obviously + * floating point sum, not something that could be an Int implicitly + * converted to Double. + * Let the compiler distinguish Double as Object and Double + * as primitive. Only DoubleStream will have the sum method. + */ + + val sum = s.mapToDouble(e => 3.14 * e.length()).sum() + + assertEquals(s"unexpected sum", expectedSum, sum, 0.00001) + } + + @Test def streamNoneMatch_EmptyStream(): Unit = { + val s = Stream.empty[String] + var predEvaluated = false + + val noneMatched = s.noneMatch((e) => { predEvaluated = true; true }) + assertTrue("unexpected match", noneMatched) + assertFalse("predicate should not have been evaluated", predEvaluated) + } + + @Test def streamNoneMatch_True(): Unit = { + val s = Stream.of("Air", "Earth", "Fire", "Water") + + val matched = s.noneMatch((e) => e.contains("X")) + assertTrue("unexpected predicate failure", matched) + } + + @Test def streamNone_MatchFalse(): Unit = { + val s = Stream.of("Air", "Earth", "Fire", "Water") + + val matched = s.noneMatch((e) => e.contains("a")) + assertFalse("unexpected predicate failure", matched) + } + + @Test def streamMax_ComparatorNaturalOrderString(): Unit = { + val stream = Stream.of("85", "4", "87", "25", "7") + + val maxOpt = stream.max(Comparator.naturalOrder[String]()) + + assertTrue("max not found", maxOpt.isPresent()) + + assertEquals( + "wrong max item found", + "87", + maxOpt.get() + ) + } + + @Test def 
streamMin_ComparatorNaturalOrderString(): Unit = { + val stream = Stream.of("85", "4", "87", "25", "7") + + val minOpt = stream.min(Comparator.naturalOrder[String]()) + + assertTrue("min not found", minOpt.isPresent()) + + assertEquals( + "wrong min item found", + "25", // These are string, not primitive, comparisions, so min is not "4" + minOpt.get() + ) + } + + case class Item(name: String, upc: Int) + + val itemComparator = new ju.Comparator[Item] { + def compare(item1: Item, item2: Item): Int = + item1.upc - item2.upc + } + + @Test def streamMax_EmptyStream(): Unit = { + val items = new ArrayList[Item](0) + + val s = items.stream() + + val maxOpt = s.max(itemComparator) + + assertFalse("max optional should be empty", maxOpt.isPresent) + } + + @Test def streamMax(): Unit = { + val nElements = 7 + val items = new ArrayList[Item](nElements) + items.add(Item("Maya", 1)) + items.add(Item("Electra", 2)) + items.add(Item("Taygete", 3)) + items.add(Item("Alcyone", 4)) + items.add(Item("Celaeno", 5)) + items.add(Item("Sterope", 6)) + items.add(Item("Merope", 7)) + + val s = items.stream() + + val maxOpt = s.max(itemComparator) + + assertTrue("max not found", maxOpt.isPresent) + assertEquals( + "wrong max item found", + items.get(nElements - 1).name, + maxOpt.get().name + ) + } + + @Test def streamMin_EmptyStream(): Unit = { + val items = new ArrayList[Item](0) + + val s = items.stream() + + val minOpt = s.min(itemComparator) + + assertFalse("min optional should be empty", minOpt.isPresent) + } + + @Test def streamMin(): Unit = { + val nElements = 7 + val items = new ArrayList[Item](nElements) + // Mix up the item.upc field so that first item is not minimum. + // Some (faulty) algorithms might always report the first item. 
+ items.add(Item("Maya", 2)) + items.add(Item("Electra", 1)) + items.add(Item("Taygete", 3)) + items.add(Item("Alcyone", 4)) + items.add(Item("Celaeno", 5)) + items.add(Item("Sterope", 6)) + items.add(Item("Merope", 7)) + + val s = items.stream() + + val minOpt = s.min(itemComparator) + + assertTrue("min not found", minOpt.isPresent) + assertEquals("wrong min item found", items.get(1).name, minOpt.get().name) + } + + /* @Ignore this test and leave it in place. The results are better + * evaluated visually/manually rather than automatically. + * JVM documentations suggests that "peek()" be mainly used for debugging. + */ + @Ignore + @Test def streamPeek(): Unit = { + val expectedCount = 3 + + val s = Stream.of("Animal", "Vegetable", "Mineral") + + /* The ".count()" is a terminal operation to force the pipeline to + * evalute. The real interest is if the peek() side-effect happened + * correctly. Currently that can only be evaluated manually/visually. + */ + val n = s.peek((e) => printf(s"peek: |${e}||\n")).count() + + assertEquals(s"unexpected count", expectedCount, n) + } + + @Ignore + @Test def streamPeek_CompositeStream(): Unit = { + // Test that peek() works with all substreams of a composite stream. + val expectedCount = 10 + + /* See ".count()" comment in streamShouldPeek above. + * + * One should see the original data before and then after transformation + * done by flatmap to each original element. 
Something like: + * before: <1> + * after: <1> + * before: <2> + * after: <1> + * after: <2> + * before: <3> + * after: <1> + * after: <2> + * after: <3> + * before: <4> + * after: <1> + * after: <2> + * after: <3> + * after: <4> + */ + val n = Stream + .of(1, 2, 3, 4) + .peek((e) => printf(s"composite peek - before: <${e}>|\n")) // simple str + .flatMap((e) => Stream.of((1 to e): _*)) + .peek((e) => printf(s"composite peek - after: <${e}>|\n")) // composite + .count() + + assertEquals(s"unexpected count", expectedCount, n) + } + + @Test def streamReduce_OneArgEmpty(): Unit = { + val s = Stream.empty[Int] + + val optional: Optional[Int] = s.reduce((r, e) => r + e) + + assertFalse("unexpected non-empty optional", optional.isPresent()) + } + + @Test def streamReduce_OneArg(): Unit = { + val s = Stream.of(3, 5, 7, 11) + val expectedSum = 26 + + val optional: Optional[Int] = s.reduce((r, e) => r + e) + + assertTrue("unexpected empty optional", optional.isPresent()) + assertEquals("unexpected reduction result", expectedSum, optional.get()) + } + + @Test def streamReduce_TwoArgEmpty(): Unit = { + val s = Stream.empty[Int] + val firstArg = 1 + + val product: Int = s.reduce( + firstArg, + (r: Int, e: Int) => r * e + ) + + assertEquals("unexpected reduction result", firstArg, product) + } + + @Test def streamReduce_TwoArg(): Unit = { + val s = Stream.of(3, 5, 7, 11) + val expectedProduct = 1155 + + val product: Int = s.reduce( + 1, + (r: Int, e: Int) => r * e + ) + + assertEquals("unexpected reduction result", expectedProduct, product) + } + + @Test def streamReduce_ThreeArgEmpty(): Unit = { + val s = Stream.empty[Int] + val firstArg = Int.MinValue + + val product: Int = s.reduce( + firstArg, + (r: Int, e: Int) => Math.max(r, e), + (r: Int, e: Int) => if (r >= e) r else e + ) + + assertEquals("unexpected reduction result", firstArg, product) + } + + @Test def streamReduce_ThreeArg(): Unit = { + + val stream = Stream.of(3, 17, 5, 13, 7, 19, 11) + val expectedMax = 19 + + val 
max: Int = stream.reduce( + Int.MinValue, + (r: Int, e: Int) => Math.max(r, e), + (r: Int, e: Int) => if (r >= e) r else e + ) + + assertEquals("unexpected reduction result", expectedMax, max) + } + + @Test def streamSkip_NegativeArg(): Unit = { + val s = Stream.of("X", "Y", "Z") + assertThrows(classOf[IllegalArgumentException], s.skip(-1)) + } + + @Test def streamSkip_TooMany(): Unit = { + val s = Stream.of("X", "Y", "Z") + + val isEmptyStream = !s.skip(10).iterator.hasNext() + assertTrue("expected empty stream", isEmptyStream) + } + + @Test def streamSkip(): Unit = { + val expectedValue = "V" + val s = Stream.of("R", "S", "T", "U", expectedValue, "X", "Y", "Z") + + val iter = s.skip(4).iterator() + + assertTrue("expected non-empty stream", iter.hasNext()) + assertEquals("unexpected first value: ", expectedValue, iter.next()) + } + + @Test def streamSorted(): Unit = { + val nElements = 8 + val wild = new ArrayList[String](nElements) + + // Ensure that the Elements are not inserted in sorted or reverse order. + wild.add("Dasher") + wild.add("Prancer") + wild.add("Vixen") + wild.add("Comet") + wild.add("Cupid") + wild.add("Donner") + wild.add("Blitzen") + wild.add("Rudolph") + + val ordered = new ArrayList(wild) + ju.Collections.sort(ordered) + + val s = wild.stream() + + val alphabetized = s.sorted() + + var count = 0 + + alphabetized.forEachOrdered((e) => { + assertEquals("mismatched elements", ordered.get(count), e) + count += 1 + }) + + val msg = + if (count == 0) "unexpected empty stream" + else "unexpected number of elements" + + assertEquals(msg, nElements, count) + } + + @Test def streamSorted_UsingComparator(): Unit = { + val nElements = 8 + val wild = new ArrayList[String](nElements) + + // Ensure that the Elements are not inserted in sorted or reverse order. 
+ wild.add("Dasher") + wild.add("Prancer") + wild.add("Vixen") + wild.add("Comet") + wild.add("Cupid") + wild.add("Donner") + wild.add("Blitzen") + wild.add("Rudolph") + + val ordered = new ArrayList[String](nElements) + ordered.add("Blitzen") + ordered.add("Comet") + ordered.add("Cupid") + ordered.add("Dasher") + ordered.add("Donner") + ordered.add("Prancer") + ordered.add("Rudolph") + ordered.add("Vixen") + + val s = wild.stream() + + val reverseOrdered = s.sorted(Comparator.reverseOrder()) + + val startIndex = nElements - 1 + var count = 0 + + reverseOrdered.forEachOrdered((e) => { + val index = startIndex - count + assertEquals("mismatched elements", ordered.get(index), e) + count += 1 + }) + + val msg = + if (count == 0) "unexpected empty stream" + else "unexpected number of elements" + + assertEquals(msg, nElements, count) + } + + @Test def streamSorted_UsingCustomComparator(): Unit = { + + // A simple class to mix things up. + // Try something a user in the wild might do and expect to work. + case class Datum(name: String, expectedOrder: Int) + + val nElements = 8 + val data = new ArrayList[Datum](nElements) + // Ensure that the Elements are not inserted in sorted or reverse order. + + /* The second field is the expected encounter order in the reverse sorted + * stream. + * That is, "Vixen" should be first in the output stream, so has 1. + * And so on... 
+ */ + + data.add(Datum("Dasher", 5)) + data.add(Datum("Prancer", 3)) + data.add(Datum("Vixen", 1)) + data.add(Datum("Comet", 7)) + data.add(Datum("Cupid", 6)) + data.add(Datum("Donner", 4)) + data.add(Datum("Blitzen", 8)) + data.add(Datum("Rudolph", 2)) + + val s = data.stream() + + val reverseOrdered = s.sorted( + new Comparator[Datum]() { + def compare(o1: Datum, o2: Datum): Int = + o2.name.compare(o1.name) + } + ) + + var count = 0 + + reverseOrdered.forEachOrdered((e) => { + count += 1 + assertEquals( + s"mismatched element ${e.name} index", + count, + e.expectedOrder + ) + }) + + val msg = + if (count == 0) "unexpected empty stream" + else "unexpected number of elements" + + assertEquals(msg, nElements, count) + } + + @Test def streamToArrayObject(): Unit = { + val nElements = 7 + val sisters = new ArrayList[String](nElements) + sisters.add("Phaisyle") + sisters.add("Coronis") + sisters.add("Cleeia") + sisters.add("Phaeo") + sisters.add("Eudora") + sisters.add("Ambrosia") + sisters.add("Dione") + + val s = sisters.stream() + + val resultantArray = s.toArray() + + // Proper size + assertEquals("result size", nElements, resultantArray.size) + + // Proper elements, in encounter order + for (j <- 0 until nElements) + assertEquals("elements do not match", sisters.get(j), resultantArray(j)) + } + + @Test def streamToArrayType(): Unit = { + val nElements = 7 + val sisters = new ArrayList[String](nElements) + sisters.add("Phaisyle") + sisters.add("Coronis") + sisters.add("Cleeia") + sisters.add("Phaeo") + sisters.add("Eudora") + sisters.add("Ambrosia") + sisters.add("Dione") + + val s = sisters.stream() + + val resultantArray = s.toArray( + new IntFunction[Array[String]]() { + def apply(value: Int): Array[String] = new Array[String](value) + } + ) + + // Proper type + assertTrue( + "Array element type not String", + resultantArray.isInstanceOf[Array[String]] + ) + + // Proper size + assertEquals("result size", nElements, resultantArray.size) + + // Proper elements, 
in encounter order + for (j <- 0 until nElements) + assertEquals("elements do not match", sisters.get(j), resultantArray(j)) + } + } diff --git a/unit-tests/shared/src/test/scala/org/scalanative/testsuite/scalalib/reflect/ClassTagTest.scala b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/scalalib/reflect/ClassTagTest.scala new file mode 100644 index 0000000000..31ce4863ea --- /dev/null +++ b/unit-tests/shared/src/test/scala/org/scalanative/testsuite/scalalib/reflect/ClassTagTest.scala @@ -0,0 +1,28 @@ +package org.scalanative.testsuite.scalalib.reflect + +import scala.reflect.ClassTag + +import org.junit._ +import org.junit.Assert._ + +class ClassTagTest { + + @Test def referentialEquality(): Unit = { + assertSame(ClassTag.Byte, implicitly[ClassTag[Byte]]) + assertSame(ClassTag.Short, implicitly[ClassTag[Short]]) + assertSame(ClassTag.Char, implicitly[ClassTag[Char]]) + assertSame(ClassTag.Int, implicitly[ClassTag[Int]]) + assertSame(ClassTag.Long, implicitly[ClassTag[Long]]) + assertSame(ClassTag.Float, implicitly[ClassTag[Float]]) + assertSame(ClassTag.Double, implicitly[ClassTag[Double]]) + assertSame(ClassTag.Boolean, implicitly[ClassTag[Boolean]]) + assertSame(ClassTag.Unit, implicitly[ClassTag[Unit]]) + assertSame(ClassTag.Object, implicitly[ClassTag[Object]]) + assertSame(ClassTag.AnyVal, implicitly[ClassTag[AnyVal]]) + assertSame(ClassTag.AnyRef, implicitly[ClassTag[AnyRef]]) + assertSame(ClassTag.Any, implicitly[ClassTag[Any]]) + // No implicit ClassTag in Scala 3 + assertSame(ClassTag.Nothing, ClassTag.Nothing) + assertSame(ClassTag.Null, ClassTag.Null) + } +} \ No newline at end of file