diff --git a/.clomonitor.yml b/.clomonitor.yml new file mode 100644 index 00000000000..09f54c0d2ef --- /dev/null +++ b/.clomonitor.yml @@ -0,0 +1,5 @@ +exemptions: + - check: artifacthub_badge + reason: "Artifact Hub doesn't support Java packages" + - check: signed_releases + reason: "Maven central releases are signed and there are no GitHub release artifacts" diff --git a/.codecov.yaml b/.codecov.yaml index c2abc07c91d..14cd4116a58 100644 --- a/.codecov.yaml +++ b/.codecov.yaml @@ -11,7 +11,7 @@ coverage: status: project: default: - target: 90% + target: 89% paths: - "!opencensus-shim/" - "!opentracing-shim/" diff --git a/.fossa.yml b/.fossa.yml new file mode 100644 index 00000000000..87c35f5bcae --- /dev/null +++ b/.fossa.yml @@ -0,0 +1,40 @@ +version: 3 + +targets: + only: + - type: gradle + exclude: + # these modules are not published and so consumers will not be exposed to them + - type: gradle + path: ./ + target: ':api:testing-internal' + - type: gradle + path: ./ + target: ':exporters:otlp:testing-internal' + - type: gradle + path: ./ + target: ':integration-tests' + - type: gradle + path: ./ + target: ':integration-tests:graal' + - type: gradle + path: ./ + target: ':integration-tests:graal-incubating' + - type: gradle + path: ./ + target: ':integration-tests:otlp' + - type: gradle + path: ./ + target: ':integration-tests:tracecontext' + - type: gradle + path: ./ + target: ':perf-harness' + - type: gradle + path: ./ + target: ':testing-internal' + +experimental: + gradle: + configurations-only: + # consumer will only be exposed to these dependencies + - runtimeClasspath diff --git a/.github/config/markdown-link-check-config.json b/.github/config/markdown-link-check-config.json deleted file mode 100644 index a458029d5d9..00000000000 --- a/.github/config/markdown-link-check-config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "retryOn429": true, - "aliveStatusCodes": [ - 200, - 403 - ] -} diff --git a/.github/renovate.json5 b/.github/renovate.json5 index 478b6ca84e8..2056841ddb7 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -1,9 +1,33 @@ { "$schema": "https://docs.renovatebot.com/renovate-schema.json", "extends": [ - "config:base" + "config:recommended", + "docker:pinDigests", + "helpers:pinGitHubActionDigests" ], "packageRules": [ + { + // this is to reduce the number of renovate PRs + "matchManagers": [ + "github-actions", + "dockerfile" + ], + "extends": ["schedule:weekly"], + "groupName": "weekly update" + }, + { + "matchPackageNames": [ + "io.opentelemetry.contrib:opentelemetry-aws-xray-propagator", + "io.opentelemetry.proto:opentelemetry-proto", + "io.opentelemetry.semconv:opentelemetry-semconv-incubating" + ], + // Renovate's default behavior is only to update from unstable -> unstable if it's for the + // major.minor.patch, under the assumption that you would want to update to the stable version + // of that release instead of the unstable version for a future release + // (TODO remove once the artifacts above release stable versions) + "ignoreUnstable": false, + "allowedVersions": "!/\\-SNAPSHOT$/" + }, { // junit-pioneer 2+ requires Java 11+ "matchPackageNames": ["org.junit-pioneer:junit-pioneer"], @@ -27,6 +51,10 @@ "matchPackageNames": ["org.jetbrains.kotlinx:kotlinx-coroutines-core"], "matchCurrentVersion": "1.5.2", "enabled": false + }, + { + "matchPackagePrefixes": ["com.diffplug.spotless"], + "groupName": "spotless packages" } ] } diff --git a/.github/repository-settings.md b/.github/repository-settings.md index 0d836539b5e..662f3849b33 100644 --- 
a/.github/repository-settings.md +++ b/.github/repository-settings.md @@ -5,62 +5,101 @@ Repository settings in addition to what's documented already at ## General > Pull Requests -* Allow squash merging > Default to pull request title +- Allow squash merging > Default to pull request title + +- Allow auto-merge ## Actions > General -* Fork pull request workflows from outside collaborators: +- Fork pull request workflows from outside collaborators: "Require approval for first-time contributors who are new to GitHub" (To reduce friction for new contributors, as the default is "Require approval for first-time contributors") -## Branch protections - -### `main` - -* Require branches to be up to date before merging: UNCHECKED - - (PR jobs take too long, and leaving this unchecked has not been a significant problem) - -* Status checks that are required: - - * EasyCLA - * required-status-check - -### `release/*` +- Workflow permissions + - Default permissions granted to the `GITHUB_TOKEN` when running workflows in this repository: + Read repository contents and packages permissions + - Allow GitHub Actions to create and approve pull requests: UNCHECKED + +## Rules > Rulesets + +### `main` and release branches + +- Targeted branches: + - `main` + - `release/*` +- Branch rules + - Restrict deletions: CHECKED + - Require linear history: CHECKED + - Require a pull request before merging: CHECKED + - Required approvals: 1 + - Require review from Code Owners: CHECKED + - Allowed merge methods: Squash + - Require status checks to pass + - Do not require status checks on creation: CHECKED + - Status checks that are required + - EasyCLA + - `required-status-check` + - `gradle-wrapper-validation` + - Block force pushes: CHECKED + - Require code scanning results: CHECKED + - CodeQL + - Security alerts: High or higher + - Alerts: Errors + +### `benchmarks` branch + +- Targeted branches: + - `benchmarks` +- Branch rules + - Restrict deletions: CHECKED + - Require linear history: CHECKED + - Block force pushes: CHECKED + +### Old-style release branches + +- Targeted branches: + - `v0.*` + - `v1.*` +- Branch rules + - Restrict creations: CHECKED + - Restrict updates: CHECKED + - Restrict deletions: CHECKED + +### Restrict branch creation + +- Targeted branches + - Exclude: + - `release/*` + - `renovate/**/*` + - `otelbot/**/*` + - `revert-*/**/*` (these are created when using the GitHub UI to revert a PR) +- Restrict creations: CHECKED + +### Restrict updating tags + +- Targeted tags + - All tags +- Restrict updates: CHECKED +- Restrict deletions: CHECKED -Same settings as above for `main`, except: - -* Restrict pushes that create matching branches: UNCHECKED - - (So that opentelemetrybot can create release branches) - -### `renovate/**/**`, and `opentelemetrybot/*` - -* Require status checks to pass before merging: UNCHECKED - - (So that renovate PRs can be rebased) - -* Restrict who can push to matching branches: UNCHECKED - - (So that bots can create PR branches in this repository) +## Branch protections -* Allow force pushes > Everyone +### `main`, `release/*` - (So that renovate PRs can be rebased) +- Restrict who can push to matching branches: CHECKED -* Allow deletions: CHECKED +## Code security and analysis - (So that bot PR branches can be deleted) +- Secret scanning: Enabled ## Secrets and variables > Actions -* `GPG_PASSWORD` - stored in OpenTelemetry-Java 1Password -* `GPG_PRIVATE_KEY` - stored in OpenTelemetry-Java 1Password -* `GRADLE_ENTERPRISE_ACCESS_KEY` - owned by 
[@jack-berg](https://github.com/jack-berg) - * Generated at https://ge.opentelemetry.io > My settings > Access keys - * format of env var is `ge.opentelemetry.io=`, - see [docs](https://docs.gradle.com/enterprise/gradle-plugin/#via_environment_variable) -* `SONATYPE_KEY` - owned by [@jack-berg](https://github.com/jack-berg) -* `SONATYPE_USER` - owned by [@jack-berg](https://github.com/jack-berg) +- `GPG_PASSWORD` - stored in OpenTelemetry-Java 1Password +- `GPG_PRIVATE_KEY` - stored in OpenTelemetry-Java 1Password +- `NVD_API_KEY` - stored in OpenTelemetry-Java 1Password + - Generated at https://nvd.nist.gov/developers/request-an-api-key + - Key is associated with [@trask](https://github.com/trask)'s gmail address +- `SONATYPE_KEY` - owned by [@jack-berg](https://github.com/jack-berg) +- `SONATYPE_USER` - owned by [@jack-berg](https://github.com/jack-berg) diff --git a/.github/scripts/get-prior-version.sh b/.github/scripts/get-prior-version.sh new file mode 100755 index 00000000000..ca4987f09ac --- /dev/null +++ b/.github/scripts/get-prior-version.sh @@ -0,0 +1,22 @@ +version=$(.github/scripts/get-version.sh) +if [[ $version =~ ^([0-9]+)\.([0-9]+)\.([0-9]+) ]]; then + major="${BASH_REMATCH[1]}" + minor="${BASH_REMATCH[2]}" + patch="${BASH_REMATCH[3]}" +else + echo "unexpected version: $version" + exit 1 +fi +if [[ $patch == 0 ]]; then + if [[ $minor == 0 ]]; then + prior_major=$((major - 1)) + prior_minor=$(grep -Po "^## Version $prior_major.\K[0-9]+" CHANGELOG.md | head -1) + prior_version="$prior_major.$prior_minor" + else + prior_version="$major.$((minor - 1)).0" + fi +else + prior_version="$major.$minor.$((patch - 1))" +fi + +echo $prior_version diff --git a/.github/scripts/markdown-link-check-with-retry.sh b/.github/scripts/markdown-link-check-with-retry.sh deleted file mode 100755 index 9a81e8df95b..00000000000 --- a/.github/scripts/markdown-link-check-with-retry.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -e - -# this script helps to reduce sporadic link check failures by retrying at a file-by-file level - -retry_count=3 - -for file in "$@"; do - for i in $(seq 1 $retry_count); do - if markdown-link-check --config "$(dirname "$0")/../config/markdown-link-check-config.json" \ - "$file"; then - break - elif [[ $i -eq $retry_count ]]; then - exit 1 - fi - sleep 5 - done -done diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 760c3e9999d..d796fc9a663 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -6,8 +6,13 @@ on: description: "The pull request # to backport" required: true +permissions: + contents: read + jobs: backport: + permissions: + contents: write # for git push to PR branch runs-on: ubuntu-latest steps: - run: | @@ -16,7 +21,7 @@ jobs: exit 1 fi - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: # history is needed to run git cherry-pick below fetch-depth: 0 diff --git a/.github/workflows/benchmark-tags.yml b/.github/workflows/benchmark-tags.yml new file mode 100644 index 00000000000..8cef6b9fcaa --- /dev/null +++ b/.github/workflows/benchmark-tags.yml @@ -0,0 +1,77 @@ +name: Benchmark Tags + +on: + workflow_dispatch: + +permissions: + contents: read + +jobs: + sdk-benchmark: + permissions: + contents: write # for git push to benchmarks branch + name: Benchmark SDK + runs-on: self-hosted + timeout-minutes: 10 + strategy: + fail-fast: false + matrix: + tag-version: + - v1.6.0 + - v1.7.0 + - v1.7.1 + - v1.10.0 + - v1.10.1 + - v1.11.0 + - v1.12.0 + 
- v1.13.0 + - v1.14.0 + - v1.15.0 + - v1.16.0 + - v1.17.0 + - v1.18.0 + - v1.19.0 + - v1.21.0 + - v1.22.0 + - v1.23.0 + - v1.23.1 + - v1.24.0 + - v1.25.0 + - v1.26.0 + - v1.27.0 + - v1.28.0 + - v1.29.0 + - v1.30.0 + - v1.30.1 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + ref: ${{ matrix.tag-version }} + + - id: setup-java + name: Set up Java for build + uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 + with: + distribution: temurin + java-version: 17 + + - name: Set up gradle + uses: gradle/actions/setup-gradle@94baf225fe0a508e581a564467443d0e2379123b # v4.3.0 + - name: Run jmh + run: ./gradlew jmhJar + + - name: Run Benchmark + run: | + cd sdk/trace/build + java -jar libs/opentelemetry-sdk-trace-*-jmh.jar -rf json SpanBenchmark SpanPipelineBenchmark ExporterBenchmark + + - name: Store benchmark results + uses: benchmark-action/github-action-benchmark@d48d326b4ca9ba73ca0cd0d59f108f9e02a381c7 # v1.20.4 + with: + tool: 'jmh' + output-file-path: sdk/trace/build/jmh-result.json + gh-pages-branch: benchmarks + github-token: ${{ secrets.GITHUB_TOKEN }} + benchmark-data-dir-path: "benchmarks" + auto-push: true + ref: ${{ matrix.tag-version }} diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 00000000000..d8688258431 --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,46 @@ +name: Benchmark Main + +on: + push: + branches: [ main ] + workflow_dispatch: + +permissions: + contents: read + +jobs: + sdk-benchmark: + permissions: + contents: write # for git push to benchmarks branch + name: Benchmark SDK + runs-on: self-hosted + timeout-minutes: 10 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - id: setup-java + name: Set up Java for build + uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 + with: + distribution: temurin + java-version: 17 + + - name: Set up gradle + uses: gradle/actions/setup-gradle@94baf225fe0a508e581a564467443d0e2379123b # v4.3.0 + - name: Run jmh + run: ./gradlew jmhJar + + - name: Run Benchmark + run: | + cd sdk/trace/build + java -jar libs/opentelemetry-sdk-trace-*-jmh.jar -rf json SpanBenchmark SpanPipelineBenchmark ExporterBenchmark + + - name: Store benchmark results + uses: benchmark-action/github-action-benchmark@d48d326b4ca9ba73ca0cd0d59f108f9e02a381c7 # v1.20.4 + with: + tool: 'jmh' + output-file-path: sdk/trace/build/jmh-result.json + gh-pages-branch: benchmarks + github-token: ${{ secrets.GITHUB_TOKEN }} + benchmark-data-dir-path: "benchmarks" + auto-push: true diff --git a/.github/workflows/build-tracecontext-testsuite.yml b/.github/workflows/build-tracecontext-testsuite.yml index 750a307b017..c6540c2d766 100644 --- a/.github/workflows/build-tracecontext-testsuite.yml +++ b/.github/workflows/build-tracecontext-testsuite.yml @@ -9,21 +9,27 @@ on: - main workflow_dispatch: +permissions: + contents: read + jobs: publish: + permissions: + contents: read + packages: write runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Login to GitHub package registry - uses: docker/login-action@v2 + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push - uses: docker/build-push-action@v4.2.1 + uses: 
docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 with: context: integration-tests/tracecontext/docker push: true diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5c404d4b0b4..98c62ff8609 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,6 +12,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true +permissions: + contents: read + jobs: build: name: Build @@ -21,47 +24,68 @@ jobs: matrix: os: - macos-latest + - macos-13 - ubuntu-latest + - windows-latest test-java-version: - 8 - 11 - 17 - - 20 + - 21 + - 23 # Collect coverage on latest LTS include: - - os: ubuntu-20.04 - test-java-version: 17 + - os: ubuntu-latest + test-java-version: 21 coverage: true + jmh-based-tests: true + # macos-latest drops support for java 8 temurin. Run java 8 on macos-13. Run java 11, 17, 21 on macos-latest. + exclude: + - os: macos-latest + test-java-version: 8 + - os: macos-13 + test-java-version: 11 + - os: macos-13 + test-java-version: 17 + - os: macos-13 + test-java-version: 21 + - os: macos-13 + test-java-version: 23 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - id: setup-java-test name: Set up Java ${{ matrix.test-java-version }} for tests - uses: actions/setup-java@v3 + uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 with: distribution: temurin java-version: ${{ matrix.test-java-version }} - id: setup-java name: Set up Java for build - uses: actions/setup-java@v3 + uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 with: distribution: temurin java-version: 17 - - uses: gradle/gradle-build-action@v2 - with: - arguments: | - build + - name: Set up gradle + uses: gradle/actions/setup-gradle@94baf225fe0a508e581a564467443d0e2379123b # v4.3.0 + - name: Build + run: > + ./gradlew build ${{ matrix.coverage && 'jacocoTestReport' || '' }} -PtestJavaVersion=${{ matrix.test-java-version }} - -Porg.gradle.java.installations.paths=${{ steps.setup-java-test.outputs.path }},${{ steps.setup-java.outputs.path }} + "-Porg.gradle.java.installations.paths=${{ steps.setup-java-test.outputs.path }}" + "-Porg.gradle.java.installations.auto-download=false" env: - GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GRADLE_ENTERPRISE_ACCESS_KEY }} + # JMH-based tests run only if this environment variable is set to true + RUN_JMH_BASED_TESTS: ${{ matrix.jmh-based-tests }} - name: Check for diff - # The jApiCmp diff compares current to latest, which isn't appropriate for release branches - if: ${{ !startsWith(github.ref_name, 'release/') && !startsWith(github.base_ref, 'release/') }} + # The jApiCmp diff compares current to latest, which isn't appropriate for release branches, or for bot-generated PRs + # this fails on windows because of the bash-specific if/then/else syntax, but that's ok + # because we only need to run this validation once (on any platform) + if: ${{ matrix.os != 'windows-latest' && !startsWith(github.ref_name, 'release/') && !startsWith(github.base_ref, 'release/') && (github.actor != 'opentelemetrybot') }} run: | # need to "git add" in case any generated files did not already exist git add docs/apidiffs @@ -75,10 +99,12 @@ jobs: exit 1 fi - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@0565863a31f2c772f9f0395002a31e3f06189574 # v5.4.0 if: ${{ matrix.coverage }} + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - - uses: 
actions/upload-artifact@v3 + - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 if: ${{ matrix.coverage }} with: name: coverage-report @@ -107,23 +133,27 @@ jobs: name: publish-snapshots${{ (github.ref_name != 'main' || github.repository != 'open-telemetry/opentelemetry-java') && ' (skipped)' || '' }} # intentionally not blocking snapshot publishing on markdown-link-check or misspell-check needs: build - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - id: setup-java name: Set up Java - uses: actions/setup-java@v3 + uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 with: distribution: temurin java-version: 17 - - uses: gradle/gradle-build-action@v2 + - name: Set up gradle + uses: gradle/actions/setup-gradle@94baf225fe0a508e581a564467443d0e2379123b # v4.3.0 + # skipping release branches because the versions in those branches are not snapshots + # (also this skips pull requests) + if: ${{ github.ref_name == 'main' && github.repository == 'open-telemetry/opentelemetry-java' }} + - name: Publish to Sonatype + run: ./gradlew assemble publishToSonatype # skipping release branches because the versions in those branches are not snapshots # (also this skips pull requests) if: ${{ github.ref_name == 'main' && github.repository == 'open-telemetry/opentelemetry-java' }} - with: - arguments: assemble publishToSonatype env: SONATYPE_USER: ${{ secrets.SONATYPE_USER }} SONATYPE_KEY: ${{ secrets.SONATYPE_KEY }} @@ -131,13 +161,20 @@ jobs: GPG_PASSWORD: ${{ secrets.GPG_PASSWORD }} build-graal: + name: Build GraalVM runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + test-graal-version: + - 21 + - 23 steps: - - uses: actions/checkout@v4 - - uses: graalvm/setup-graalvm@v1 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: graalvm/setup-graalvm@01ed653ac833fe80569f1ef9f25585ba2811baab # v1.3.3 with: - version: 'latest' - java-version: '17' + java-version: ${{ matrix.test-graal-version }} + distribution: 'graalvm' components: 'native-image' github-token: ${{ secrets.GITHUB_TOKEN }} - name: Running test @@ -145,7 +182,6 @@ jobs: echo "GRAALVM_HOME: $GRAALVM_HOME" echo "JAVA_HOME: $JAVA_HOME" java --version - gu --version native-image --version ./gradlew nativeTest diff --git a/.github/workflows/codeql-daily.yml b/.github/workflows/codeql-daily.yml deleted file mode 100644 index b54c40e7cd4..00000000000 --- a/.github/workflows/codeql-daily.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: CodeQL (daily) - -on: - schedule: - # Daily at 01:30 (UTC) - - cron: '30 1 * * *' - workflow_dispatch: - -jobs: - analyze: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - - name: Set up Java 17 - uses: actions/setup-java@v3 - with: - distribution: temurin - java-version: 17 - - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: java - # using "latest" helps to keep up with the latest Kotlin support - # see https://github.com/github/codeql-action/issues/1555#issuecomment-1452228433 - tools: latest - - - uses: gradle/gradle-build-action@v2 - with: - # skipping build cache is needed so that all modules will be analyzed - arguments: assemble --no-build-cache - - - name: Perform CodeQL analysis - uses: github/codeql-action/analyze@v2 - - open-issue-on-failure: - # open an issue on failure because it can be easy to miss CI failure notifications - needs: - - analyze - if: 
failure() && github.run_attempt == 1 - uses: ./.github/workflows/reusable-open-issue-on-failure.yml diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 00000000000..5f63fc39e47 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,54 @@ +name: CodeQL + +on: + pull_request: + branches: + - main + - release/* + - benchmarks + push: + branches: + - main + - release/* + - benchmarks + schedule: + - cron: "29 13 * * 2" # weekly at 13:29 UTC on Tuesday + +permissions: + contents: read + +jobs: + analyze: + permissions: + contents: read + actions: read # for github/codeql-action/init to get workflow details + security-events: write # for github/codeql-action/analyze to upload SARIF results + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - name: Set up Java 17 + uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 + with: + distribution: temurin + java-version: 17 + + - name: Set up gradle + uses: gradle/actions/setup-gradle@94baf225fe0a508e581a564467443d0e2379123b # v4.3.0 + + - name: Initialize CodeQL + uses: github/codeql-action/init@6bb031afdd8eb862ea3fc1848194185e076637e5 # v3.28.11 + with: + languages: java, actions + # using "latest" helps to keep up with the latest Kotlin support + # see https://github.com/github/codeql-action/issues/1555#issuecomment-1452228433 + tools: latest + + - name: Assemble + # --no-build-cache is required for codeql to analyze all modules + # --no-daemon is required for codeql to observe the compilation + # (see https://docs.github.com/en/code-security/codeql-cli/getting-started-with-the-codeql-cli/preparing-your-code-for-codeql-analysis#specifying-build-commands) + run: ./gradlew assemble --no-build-cache --no-daemon + + - name: Perform CodeQL analysis + uses: github/codeql-action/analyze@6bb031afdd8eb862ea3fc1848194185e076637e5 # v3.28.11 diff --git a/.github/workflows/docker-test-containers-daily.yml b/.github/workflows/docker-test-containers-daily.yml index 3d3e2876dbf..63721df8e90 100644 --- a/.github/workflows/docker-test-containers-daily.yml +++ b/.github/workflows/docker-test-containers-daily.yml @@ -5,8 +5,14 @@ on: - cron: "23 3 * * *" workflow_dispatch: +permissions: + contents: read + jobs: copy-images: + permissions: + contents: read + packages: write strategy: matrix: include: @@ -21,7 +27,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Docker login - uses: docker/login-action@v2 + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 with: registry: ghcr.io username: ${{ github.repository_owner }} diff --git a/.github/workflows/fossa.yml b/.github/workflows/fossa.yml new file mode 100644 index 00000000000..982628c9121 --- /dev/null +++ b/.github/workflows/fossa.yml @@ -0,0 +1,20 @@ +name: FOSSA + +on: + push: + branches: + - main + +permissions: + contents: read + +jobs: + fossa: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - uses: fossas/fossa-action@93a52ecf7c3ac7eb40f5de77fd69b1a19524de94 # v1.5.0 + with: + api-key: ${{secrets.FOSSA_API_KEY}} + team: OpenTelemetry diff --git a/.github/workflows/generate-post-release-pr.yml b/.github/workflows/generate-post-release-pr.yml new file mode 100644 index 00000000000..b6b087537d8 --- /dev/null +++ b/.github/workflows/generate-post-release-pr.yml @@ -0,0 +1,74 @@ +name: Generate Post-Release PR +on: + workflow_dispatch: + +permissions: + contents: read + +jobs: + prereqs: + 
runs-on: ubuntu-latest + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - name: Verify prerequisites + run: | + if [[ $GITHUB_REF_NAME != main ]]; then + echo this workflow should only be run against main + exit 1 + fi + + create-pull-request-against-main: + permissions: + contents: write # for git push to PR branch + runs-on: ubuntu-latest + needs: + - prereqs + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - id: setup-java + name: Set up Java for build + uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 + with: + distribution: temurin + java-version: 17 + + - name: Set environment variables + run: | + version=$(.github/scripts/get-version.sh) + echo "VERSION=$version" >> $GITHUB_ENV + prior_version=$(.github/scripts/get-prior-version.sh) + echo "PRIOR_VERSION=$prior_version" >> $GITHUB_ENV + if [[ $prior_version =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then + major="${BASH_REMATCH[1]}" + minor="${BASH_REMATCH[2]}" + patch="${BASH_REMATCH[3]}" + + two_releases_ago="$major.$((minor - 1)).$patch" + else + echo "unexpected prior version: $prior_version" + exit 1 + fi + echo "TWO_VERSIONS_AGO=$two_releases_ago" >> $GITHUB_ENV + - name: Use CLA approved github bot + run: .github/scripts/use-cla-approved-github-bot.sh + + - name: Create pull request against main + env: + # not using secrets.GITHUB_TOKEN since pull requests from that token do not run workflows + GH_TOKEN: ${{ secrets.OPENTELEMETRYBOT_GITHUB_TOKEN }} + run: | + ./gradlew updateVersionInDocs -Prelease.version=$PRIOR_VERSION + ./gradlew japicmp -PapiBaseVersion=$TWO_VERSIONS_AGO -PapiNewVersion=$PRIOR_VERSION + ./gradlew --refresh-dependencies japicmp + + message="Post release for version $PRIOR_VERSION" + body="Post-release updates for version \`$PRIOR_VERSION\`." 
+ branch="opentelemetrybot/post-release-for-${PRIOR_VERSION}" + + git checkout -b $branch + git add docs/apidiffs + git commit -a -m "$message" + git push --set-upstream origin $branch + gh pr create --title "$message" \ + --body "$body" \ + --base main diff --git a/.github/workflows/gradle-wrapper-validation.yml b/.github/workflows/gradle-wrapper-validation.yml index b42e7b96b2c..24a86ddecd2 100644 --- a/.github/workflows/gradle-wrapper-validation.yml +++ b/.github/workflows/gradle-wrapper-validation.yml @@ -1,16 +1,16 @@ name: Gradle wrapper validation + on: - pull_request: - paths: - - '**/gradle/wrapper/**' push: - paths: - - '**/gradle/wrapper/**' + pull_request: + +permissions: + contents: read jobs: - validation: + gradle-wrapper-validation: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: gradle/wrapper-validation-action@v1.1.0 + - uses: gradle/actions/wrapper-validation@94baf225fe0a508e581a564467443d0e2379123b # v4.3.0 diff --git a/.github/workflows/issue-management-feedback-label.yml b/.github/workflows/issue-management-feedback-label.yml index da9aa75b7bb..2bd368d2be9 100644 --- a/.github/workflows/issue-management-feedback-label.yml +++ b/.github/workflows/issue-management-feedback-label.yml @@ -4,14 +4,20 @@ on: issue_comment: types: [created] +permissions: + contents: read + jobs: issue_comment: + permissions: + contents: read + issues: write if: > contains(github.event.issue.labels.*.name, 'needs author feedback') && github.event.comment.user.login == github.event.issue.user.login runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Remove label env: diff --git a/.github/workflows/issue-management-stale-action.yml b/.github/workflows/issue-management-stale-action.yml index 18f80c1960e..44fd26028c7 100644 --- a/.github/workflows/issue-management-stale-action.yml +++ b/.github/workflows/issue-management-stale-action.yml @@ -5,11 +5,18 @@ on: # hourly at minute 23 - cron: "23 * * * *" +permissions: + contents: read + jobs: stale: + permissions: + contents: read + issues: write # for actions/stale to close stale issues + pull-requests: write # for actions/stale to close stale PRs runs-on: ubuntu-latest steps: - - uses: actions/stale@v8 + - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0 with: repo-token: ${{ secrets.GITHUB_TOKEN }} days-before-stale: 7 diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml new file mode 100644 index 00000000000..ebc6b099014 --- /dev/null +++ b/.github/workflows/ossf-scorecard.yml @@ -0,0 +1,47 @@ +name: OSSF Scorecard + +on: + push: + branches: + - main + schedule: + - cron: "43 6 * * 5" # weekly at 06:43 (UTC) on Friday + workflow_dispatch: + +permissions: read-all + +jobs: + analysis: + runs-on: ubuntu-latest + permissions: + # Needed for Code scanning upload + security-events: write + # Needed for GitHub OIDC token if publish_results is true + id-token: write + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + + - uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 + with: + results_file: results.sarif + results_format: sarif + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable + # uploads of run results in SARIF format to the repository Actions tab. 
+ # https://docs.github.com/en/actions/advanced-guides/storing-workflow-data-as-artifacts + - name: "Upload artifact" + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard (optional). + # Commenting out will disable upload of results to your repo's Code Scanning dashboard + - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@6bb031afdd8eb862ea3fc1848194185e076637e5 # v3.28.11 + with: + sarif_file: results.sarif diff --git a/.github/workflows/owasp-dependency-check-daily.yml b/.github/workflows/owasp-dependency-check-daily.yml index cce51d9e57a..cb2051d5521 100644 --- a/.github/workflows/owasp-dependency-check-daily.yml +++ b/.github/workflows/owasp-dependency-check-daily.yml @@ -4,27 +4,44 @@ name: OWASP dependency check (daily) on: schedule: - - cron: '30 1 * * *' + - cron: "30 1 * * *" # daily at 1:30 UTC workflow_dispatch: +permissions: + contents: read + jobs: analyze: runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 with: distribution: temurin java-version: 17 - - uses: gradle/gradle-build-action@v2 - with: - arguments: "dependencyCheckAnalyze" + - name: Set up gradle + uses: gradle/actions/setup-gradle@94baf225fe0a508e581a564467443d0e2379123b # v4.3.0 + + - name: Check dependencies + run: ./gradlew dependencyCheckAnalyze + env: + NVD_API_KEY: ${{ secrets.NVD_API_KEY }} - name: Upload report if: always() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 with: path: javaagent/build/reports + + workflow-notification: + permissions: + contents: read + issues: write + needs: + - analyze + if: always() + uses: ./.github/workflows/reusable-workflow-notification.yml + with: + success: ${{ needs.analyze.result == 'success' }} diff --git a/.github/workflows/prepare-patch-release.yml b/.github/workflows/prepare-patch-release.yml index 5da1a4f8e2c..bd4fe8cb7d4 100644 --- a/.github/workflows/prepare-patch-release.yml +++ b/.github/workflows/prepare-patch-release.yml @@ -2,11 +2,16 @@ name: Prepare patch release on: workflow_dispatch: +permissions: + contents: read + jobs: prepare-patch-release: + permissions: + contents: write # for git push to PR branch runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - run: | if [[ ! 
$GITHUB_REF_NAME =~ ^release/v[0-9]+\.[0-9]+\.x$ ]]; then diff --git a/.github/workflows/prepare-release-branch.yml b/.github/workflows/prepare-release-branch.yml index 63d4488e850..382c5817083 100644 --- a/.github/workflows/prepare-release-branch.yml +++ b/.github/workflows/prepare-release-branch.yml @@ -2,11 +2,14 @@ name: Prepare release branch on: workflow_dispatch: +permissions: + contents: read + jobs: prereqs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Verify prerequisites run: | @@ -21,11 +24,13 @@ jobs: fi create-pull-request-against-release-branch: + permissions: + contents: write # for git push to PR branch runs-on: ubuntu-latest needs: - prereqs steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Create release branch run: | @@ -70,11 +75,13 @@ jobs: --base $RELEASE_BRANCH_NAME create-pull-request-against-main: + permissions: + contents: write # for git push to PR branch runs-on: ubuntu-latest needs: - prereqs steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Set environment variables run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2b9b785a277..7124f097b6a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,9 +2,14 @@ name: Release on: workflow_dispatch: +permissions: + contents: read + jobs: release: - runs-on: ubuntu-22.04 + permissions: + contents: write # for creating the release + runs-on: ubuntu-24.04 outputs: version: ${{ steps.create-github-release.outputs.version }} steps: @@ -14,17 +19,18 @@ jobs: exit 1 fi - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 with: distribution: temurin java-version: 17 + - name: Set up gradle + uses: gradle/actions/setup-gradle@94baf225fe0a508e581a564467443d0e2379123b # v4.3.0 + - name: Build and publish artifacts - uses: gradle/gradle-build-action@v2 - with: - arguments: assemble publishToSonatype closeAndReleaseSonatypeStagingRepository + run: ./gradlew assemble publishToSonatype closeAndReleaseSonatypeStagingRepository env: SONATYPE_USER: ${{ secrets.SONATYPE_USER }} SONATYPE_KEY: ${{ secrets.SONATYPE_KEY }} @@ -58,7 +64,7 @@ jobs: # check out main branch to verify there won't be problems with merging the change log # at the end of this workflow - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: main @@ -73,7 +79,7 @@ jobs: fi # back to the release branch - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: # tags are needed for the generate-release-contributors.sh script fetch-depth: 0 @@ -120,17 +126,18 @@ jobs: gh release create --target $GITHUB_REF_NAME \ --title "Version $VERSION" \ --notes-file /tmp/release-notes.txt \ - --discussion-category announcements \ v$VERSION echo "version=$VERSION" >> $GITHUB_OUTPUT merge-change-log-to-main: + permissions: + contents: write # for git push to PR branch runs-on: ubuntu-latest needs: - release steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Copy change log section from release branch env: @@ -139,7 +146,7 @@ jobs: 
sed -n "0,/^## Version $VERSION /d;/^## Version /q;p" CHANGELOG.md \ > /tmp/changelog-section.md - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: main diff --git a/.github/workflows/reusable-markdown-link-check.yml b/.github/workflows/reusable-markdown-link-check.yml index 4e7ad15c423..3756f98a790 100644 --- a/.github/workflows/reusable-markdown-link-check.yml +++ b/.github/workflows/reusable-markdown-link-check.yml @@ -3,19 +3,20 @@ name: Reusable - Markdown link check on: workflow_call: +permissions: + contents: read + jobs: markdown-link-check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - - name: Install markdown-link-check - # TODO(jack-berg): use latest when config file reading bug is fixed: https://github.com/tcort/markdown-link-check/issues/246 - run: npm install -g markdown-link-check@3.10.3 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - name: Run markdown-link-check - run: | - find . -type f \ - -name '*.md' \ - -not -path './CHANGELOG.md' \ - | xargs .github/scripts/markdown-link-check-with-retry.sh + - uses: lycheeverse/lychee-action@f613c4a64e50d792e0b31ec34bbcbba12263c6a6 # v2.3.0 + with: + # excluding links to pull requests and issues is done for performance + args: > + --include-fragments + --exclude "^https://github.com/open-telemetry/opentelemetry-java/(issue|pull)/\\d+$" + --max-retries 6 + . diff --git a/.github/workflows/reusable-misspell-check.yml b/.github/workflows/reusable-misspell-check.yml index 7876c441a9b..76d361c777b 100644 --- a/.github/workflows/reusable-misspell-check.yml +++ b/.github/workflows/reusable-misspell-check.yml @@ -3,11 +3,14 @@ name: Reusable - Misspell check on: workflow_call: +permissions: + contents: read + jobs: misspell-check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install misspell run: | diff --git a/.github/workflows/reusable-open-issue-on-failure.yml b/.github/workflows/reusable-open-issue-on-failure.yml index 309b7119ed0..15a46db455a 100644 --- a/.github/workflows/reusable-open-issue-on-failure.yml +++ b/.github/workflows/reusable-open-issue-on-failure.yml @@ -3,11 +3,17 @@ name: Reusable - Open issue on workflow failure on: workflow_call: +permissions: + contents: read + jobs: open-issue: + permissions: + contents: read + issues: write # for creating the issue runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Open issue env: diff --git a/.github/workflows/reusable-workflow-notification.yml b/.github/workflows/reusable-workflow-notification.yml new file mode 100644 index 00000000000..701f90f5a08 --- /dev/null +++ b/.github/workflows/reusable-workflow-notification.yml @@ -0,0 +1,44 @@ +# this is useful because notifications for scheduled workflows are only sent to the user who +# initially created the given workflow +name: Reusable - Workflow notification + +on: + workflow_call: + inputs: + success: + type: boolean + required: true + +permissions: + contents: read + +jobs: + workflow-notification: + permissions: + contents: read + issues: write + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - name: Open issue or add comment if issue already open + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # TODO (trask) search doesn't support exact phrases, 
so it's possible that this could grab the wrong issue + number=$(gh issue list --search "in:title Workflow failed: $GITHUB_WORKFLOW" --limit 1 --json number -q .[].number) + + echo $number + echo ${{ inputs.success }} + + if [[ $number ]]; then + if [[ "${{ inputs.success }}" == "true" ]]; then + gh issue close $number + else + gh issue comment $number \ + --body "See [$GITHUB_WORKFLOW #$GITHUB_RUN_NUMBER](https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID)." + fi + elif [[ "${{ inputs.success }}" == "false" ]]; then + gh issue create --title "Workflow failed: $GITHUB_WORKFLOW (#$GITHUB_RUN_NUMBER)" \ + --body "See [$GITHUB_WORKFLOW #$GITHUB_RUN_NUMBER](https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID)." + fi diff --git a/.gitignore b/.gitignore index 69c5f8b7cf9..6dd7dc74f2f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # Gradle build .gradle +.kotlin local.properties out/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 4afb3d435c4..b48b4ab0eaf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,1010 @@ # Changelog -## Unreleased +## Version 1.49.0 (2025-04-04) + +### SDK + +#### Trace + +* Avoid linear queue.size() calls in span producers by storing queue size separately + ([#7141](https://github.com/open-telemetry/opentelemetry-java/pull/7141)) + +#### Exporters + +* OTLP: Add support for setting exporter executor service + ([#7152](https://github.com/open-telemetry/opentelemetry-java/pull/7152)) +* OTLP: Refine delay jitter for exponential backoff + ([#7206](https://github.com/open-telemetry/opentelemetry-java/pull/7206)) + +#### Extensions + +* Autoconfigure: Remove support for otel.experimental.exporter.otlp.retry.enabled + ([#7200](https://github.com/open-telemetry/opentelemetry-java/pull/7200)) +* Autoconfigure: Add stable cardinality limit property otel.java.metrics.cardinality.limit + ([#7199](https://github.com/open-telemetry/opentelemetry-java/pull/7199)) +* Incubator: Add declarative config model customizer SPI + ([#7118](https://github.com/open-telemetry/opentelemetry-java/pull/7118)) + +## Version 1.48.0 (2025-03-07) + +### API + +* Add some helpful logging attribute methods to `LogRecordBuilder` + ([#7089](https://github.com/open-telemetry/opentelemetry-java/pull/7089)) + +#### Incubator + +* Introduce ConfigProvider API. Rename `StructuredConfigProperties` to `DeclarativeConfigProperties` + and move to `opentelemetry-api-incubator`. Rename `FileConfiguration` + to `DeclarativeConfiguration`. + ([#6549](https://github.com/open-telemetry/opentelemetry-java/pull/6549)) + +### SDK + +* Log warning and adjust when BatchLogRecordProcessor, BatchSpanProcessor `maxExportBatchSize` + exceeds `maxQueueSize`. 
+ ([#7045](https://github.com/open-telemetry/opentelemetry-java/pull/7045), + [#7148](https://github.com/open-telemetry/opentelemetry-java/pull/7148)) +* Fix bug causing `ThrottlingLogger` to log more than once per minute + ([#7156](https://github.com/open-telemetry/opentelemetry-java/pull/7156)) + +#### Metrics + +* Remove obsolete `SdkMeterProviderUtil#setCardinalitylimit` API + ([#7169](https://github.com/open-telemetry/opentelemetry-java/pull/7169)) + +#### Traces + +* Fix bug preventing accurate reporting of span event dropped attribute count + ([#7142](https://github.com/open-telemetry/opentelemetry-java/pull/7142)) + +#### Exporters + +* OTLP: remove support for `otel.java.experimental.exporter.memory_mode` + which was previously replaced by `otel.java.exporter.memory_mode` + ([#7127](https://github.com/open-telemetry/opentelemetry-java/pull/7127)) +* OTLP: Extract sender parameters to config carrier class + (incubating API) + ([#7151](https://github.com/open-telemetry/opentelemetry-java/pull/7151)) +* OTLP: Add support for setting OTLP exporter service class loader + ([#7150](https://github.com/open-telemetry/opentelemetry-java/pull/7150)) + +### Tooling + +* Update android animalsniffer min API version to 23 + ([#7153](https://github.com/open-telemetry/opentelemetry-java/pull/7153)) + +## Version 1.47.0 (2025-02-07) + +### API + +#### Incubator + +* Make `ExtendedTracer` easier to use + ([#6943](https://github.com/open-telemetry/opentelemetry-java/pull/6943)) +* Add `ExtendedLogRecordBuilder#setEventName` and corresponding SDK and OTLP serialization + ([#7012](https://github.com/open-telemetry/opentelemetry-java/pull/7012)) +* BREAKING: Drop event API / SDK + ([#7053](https://github.com/open-telemetry/opentelemetry-java/pull/7053)) + +### SDK + +* Remove -alpha artifacts from runtime classpath of stable components + ([#6944](https://github.com/open-telemetry/opentelemetry-java/pull/6944)) + +#### Traces + +* Bugfix: Follow spec on span limits, batch processors + ([#7030](https://github.com/open-telemetry/opentelemetry-java/pull/7030)) +* Add experimental `SdkTracerProvider.setScopeConfigurator(ScopeConfigurator)` for + updating `TracerConfig` at runtime + ([#7021](https://github.com/open-telemetry/opentelemetry-java/pull/7021)) + +#### Profiles + +* Add AttributeKeyValue abstraction to common otlp exporters + ([#7026](https://github.com/open-telemetry/opentelemetry-java/pull/7026)) +* Improve profiles attribute table handling + ([#7031](https://github.com/open-telemetry/opentelemetry-java/pull/7031)) + +#### Exporters + +* Interpret timeout zero value as no limit + ([#7023](https://github.com/open-telemetry/opentelemetry-java/pull/7023)) +* Bugfix - OTLP: Fix concurrent span reusable data marshaler + ([#7041](https://github.com/open-telemetry/opentelemetry-java/pull/7041)) +* OTLP: Add ability to customize retry exception predicate + ([#6991](https://github.com/open-telemetry/opentelemetry-java/pull/6991)) +* OTLP: Expand default OkHttp sender retry exception predicate + ([#7047](https://github.com/open-telemetry/opentelemetry-java/pull/7047), + [#7057](https://github.com/open-telemetry/opentelemetry-java/pull/7057)) + +#### Extensions + +* Autoconfigure: Consistent application of exporter customizers when otel.{signal}.exporter=none + ([#7017](https://github.com/open-telemetry/opentelemetry-java/pull/7017)) +* Autoconfigure: Promote EnvironmentResourceProvider to public API + ([#7052](https://github.com/open-telemetry/opentelemetry-java/pull/7052)) +* Autoconfigure: Ensure 
`OTEL_PROPAGATORS` still works when `OTEL_SDK_DISABLED=true`. + ([#7062](https://github.com/open-telemetry/opentelemetry-java/pull/7062)) + +#### Testing + +* Add W3CBaggagePropagator to `OpenTelemetryRule`, `OpenTelemetryExtension`. + ([#7056](https://github.com/open-telemetry/opentelemetry-java/pull/7056)) + +## Version 1.46.0 (2025-01-10) + +### SDK + +* Remove unused dependencies, cleanup code after stabilizing Value + ([#6948](https://github.com/open-telemetry/opentelemetry-java/pull/6948)) +* Explicitly allow null into CompletableResultCode.failExceptionally() + ([#6963](https://github.com/open-telemetry/opentelemetry-java/pull/6963)) + +#### Traces + +* Fix span setStatus + ([#6990](https://github.com/open-telemetry/opentelemetry-java/pull/6990)) + +#### Logs + +* Add getters/accessors for readable fields in ReadWriteLogRecord. + ([#6924](https://github.com/open-telemetry/opentelemetry-java/pull/6924)) + +#### Exporters + +* OTLP: Update to opentelemetry-proto 1.5 + ([#6999](https://github.com/open-telemetry/opentelemetry-java/pull/6999)) +* Bugfix - OTLP: Ensure Serializer runtime exceptions are rethrown as IOException + ([#6969](https://github.com/open-telemetry/opentelemetry-java/pull/6969)) +* BREAKING - OTLP: Delete experimental OTLP authenticator concept. + See [OTLP authentication docs](https://opentelemetry.io/docs/languages/java/sdk/#authentication) + for supported solutions. + ([#6984](https://github.com/open-telemetry/opentelemetry-java/pull/6984)) + +#### Extensions + +* BREAKING - Autoconfigure: Remove support for deprecated otel.experimental.resource.disabled.keys + ([#6931](https://github.com/open-telemetry/opentelemetry-java/pull/6931)) + +## Version 1.45.0 (2024-12-06) + +### API + +* Add convenience method `setAttribute(Attribute, int)` to SpanBuilder (matching the existing + convenience method in Span) + ([#6884](https://github.com/open-telemetry/opentelemetry-java/pull/6884)) +* Extends TextMapGetter with experimental GetAll() method, implement usage in W3CBaggagePropagator + ([#6852](https://github.com/open-telemetry/opentelemetry-java/pull/6852)) + +### SDK + +#### Traces + +* Add synchronization to SimpleSpanProcessor to ensure thread-safe export of spans + ([#6885](https://github.com/open-telemetry/opentelemetry-java/pull/6885)) + +#### Metrics + +* Lazily initialize ReservoirCells + ([#6851](https://github.com/open-telemetry/opentelemetry-java/pull/6851)) + +#### Logs + +* Add synchronization to SimpleLogRecordProcessor to ensure thread-safe export of logs + ([#6885](https://github.com/open-telemetry/opentelemetry-java/pull/6885)) + +#### Exporters + +* OTLP: Update opentelemetry-proto to 1.4 + ([#6906](https://github.com/open-telemetry/opentelemetry-java/pull/6906)) +* OTLP: Rename internal Marshaler#writeJsonToGenerator method to allow jackson runtimeOnly dependency + ([#6896](https://github.com/open-telemetry/opentelemetry-java/pull/6896)) +* OTLP: Fix repeated string serialization for JSON. 
+ ([#6888](https://github.com/open-telemetry/opentelemetry-java/pull/6888)) +* OTLP: Fix missing unsafe available check + ([#6920](https://github.com/open-telemetry/opentelemetry-java/pull/6920)) + +#### Extensions + +* Declarative config: Don't require empty objects when referencing custom components + ([#6891](https://github.com/open-telemetry/opentelemetry-java/pull/6891)) + +### Tooling + +* Add javadoc boilerplate internal comment v2 for experimental classes + ([#6886](https://github.com/open-telemetry/opentelemetry-java/pull/6886)) +* Update develocity configuration + ([#6903](https://github.com/open-telemetry/opentelemetry-java/pull/6903)) + +## Version 1.44.1 (2024-11-10) + +### SDK + +#### Traces + +* Fix regression in event attributes + ([#6865](https://github.com/open-telemetry/opentelemetry-java/pull/6865)) + +## Version 1.44.0 (2024-11-08) + +### API + +* Fix ConfigUtil#getString ConcurrentModificationException + ([#6841](https://github.com/open-telemetry/opentelemetry-java/pull/6841)) + +### SDK + +#### Traces + +* Stabilize ExceptionEventData + ([#6795](https://github.com/open-telemetry/opentelemetry-java/pull/6795)) + +#### Metrics + +* Stabilize metric cardinality limits + ([#6794](https://github.com/open-telemetry/opentelemetry-java/pull/6794)) +* Refactor metrics internals to remove MeterSharedState + ([#6845](https://github.com/open-telemetry/opentelemetry-java/pull/6845)) + +#### Exporters + +* Add memory mode option to stdout exporters + ([#6774](https://github.com/open-telemetry/opentelemetry-java/pull/6774)) +* Log a warning if OTLP endpoint port is likely incorrect given the protocol + ([#6813](https://github.com/open-telemetry/opentelemetry-java/pull/6813)) +* Fix OTLP gRPC retry mechanism for unsuccessful HTTP responses + ([#6829](https://github.com/open-telemetry/opentelemetry-java/pull/6829)) +* Add ByteBuffer field type marshaling support + ([#6686](https://github.com/open-telemetry/opentelemetry-java/pull/6686)) +* Fix stdout exporter format by adding newline after each export + ([#6848](https://github.com/open-telemetry/opentelemetry-java/pull/6848)) +* Enable `reusable_data` memory mode by default for `OtlpGrpc{Signal}Exporter`, + `OtlpHttp{Signal}Exporter`, `OtlpStdout{Signal}Exporter`, and `PrometheusHttpServer` + ([#6799](https://github.com/open-telemetry/opentelemetry-java/pull/6799)) + +#### Extension + +* Rebrand file configuration to declarative configuration in documentation + ([#6812](https://github.com/open-telemetry/opentelemetry-java/pull/6812)) +* Fix declarative config `file_format` validation + ([#6786](https://github.com/open-telemetry/opentelemetry-java/pull/6786)) +* Fix declarative config env substitution by disallowing '}' in default value + ([#6793](https://github.com/open-telemetry/opentelemetry-java/pull/6793)) +* Set declarative config default OTLP protocol to http/protobuf + ([#6800](https://github.com/open-telemetry/opentelemetry-java/pull/6800)) +* Stabilize autoconfigure disabling of resource keys via `otel.resource.disabled.keys` + ([#6809](https://github.com/open-telemetry/opentelemetry-java/pull/6809)) + +### Tooling + +* Run tests on Java 23 + ([#6825](https://github.com/open-telemetry/opentelemetry-java/pull/6825)) +* Test Windows in CI + ([#6824](https://github.com/open-telemetry/opentelemetry-java/pull/6824)) +* Add error prone checks for internal javadoc and private constructors + ([#6844](https://github.com/open-telemetry/opentelemetry-java/pull/6844)) + +## Version 1.43.0 (2024-10-11) + +### API + +* Add helper class 
to capture context using ScheduledExecutorService + ([#6712](https://github.com/open-telemetry/opentelemetry-java/pull/6712)) +* Adds Baggage.getEntry(String key) + ([#6765](https://github.com/open-telemetry/opentelemetry-java/pull/6765)) + +#### Extensions + +* Fix ottracepropagation for short span ids + ([#6734](https://github.com/open-telemetry/opentelemetry-java/pull/6734)) + +### SDK + +#### Metrics + +* Optimize advice with FilteredAttributes + ([#6633](https://github.com/open-telemetry/opentelemetry-java/pull/6633)) + +#### Exporters + +* Add experimental stdout log, metric, trace exporters for printing records to stdout in standard + OTLP JSON format. + ([#6675](https://github.com/open-telemetry/opentelemetry-java/pull/6675), [#6750](https://github.com/open-telemetry/opentelemetry-java/pull/6750)) +* Add Marshalers for profiling signal type + ([#6680](https://github.com/open-telemetry/opentelemetry-java/pull/6680)) + +#### Extensions + +* Add `*Model` suffix to declarative config generated classes. + ([#6721](https://github.com/open-telemetry/opentelemetry-java/pull/6721)) +* Use autoconfigured ClassLoader to load declarative config + ([#6725](https://github.com/open-telemetry/opentelemetry-java/pull/6725)) +* Update declarative config to use opentelemetry-configuration v0.3.0 + ([#6733](https://github.com/open-telemetry/opentelemetry-java/pull/6733)) +* Add `StructuredConfigProperties#getStructured` default method, + add `StructuredConfigProperties.empty()` + ([#6759](https://github.com/open-telemetry/opentelemetry-java/pull/6759)) + +#### Testing + +* Add context info about wrong span or trace. + ([#6703](https://github.com/open-telemetry/opentelemetry-java/pull/6703)) + +## Version 1.42.1 (2024-09-10) + +### API + +* Revert `java-test-fixtures` plugin to remove test dependencies from `pom.xml`. + ([#6695](https://github.com/open-telemetry/opentelemetry-java/pull/6695)) + +## Version 1.42.0 (2024-09-06) + +### API + +* BREAKING: Stabilize log support for AnyValue bodies. Rename `AnyValue` to `Value`, promote + from `opentelemetry-api-incubator` to `opentelemetry-api`, change package + from `io.opentelemetry.api.incubator.logs` to `io.opentelemetry.api.common`. + ([#6591](https://github.com/open-telemetry/opentelemetry-java/pull/6591)) +* Noop implementations detect when `opentelemetry-api-incubator` is present and return extended noop + implementations. + ([#6617](https://github.com/open-telemetry/opentelemetry-java/pull/6617)) + +### SDK + +#### Traces + +* Added experimental support for SpanProcessor OnEnding callback + ([#6367](https://github.com/open-telemetry/opentelemetry-java/pull/6367)) +* Remove final modifier from SdkTracer.tracerEnabled + ([#6687](https://github.com/open-telemetry/opentelemetry-java/pull/6687)) + +#### Exporters + +* Suppress zipkin exporter instrumentation + ([#6552](https://github.com/open-telemetry/opentelemetry-java/pull/6552)) +* OTLP exporters return status code exceptions via CompletableResultCode in GrpcExporter and + HttpExporter. 
+ ([#6645](https://github.com/open-telemetry/opentelemetry-java/pull/6645)) +* Align GrpcSender contract with HttpSender + ([#6658](https://github.com/open-telemetry/opentelemetry-java/pull/6658)) + +#### Extensions + +* Add autoconfigure support for ns and us durations + ([#6654](https://github.com/open-telemetry/opentelemetry-java/pull/6654)) +* Add declarative configuration ComponentProvider support for resources + ([#6625](https://github.com/open-telemetry/opentelemetry-java/pull/6625)) +* Add declarative configuration ComponentProvider support for processors + ([#6623](https://github.com/open-telemetry/opentelemetry-java/pull/6623)) +* Add declarative configuration ComponentProvider support for samplers + ([#6494](https://github.com/open-telemetry/opentelemetry-java/pull/6494)) +* Add declarative configuration ComponentProvider support for propagators + ([#6624](https://github.com/open-telemetry/opentelemetry-java/pull/6624)) +* Add declarative configuration missing pieces + ([#6677](https://github.com/open-telemetry/opentelemetry-java/pull/6677)) +* Change jaeger remote sampler autoconfigure property from `pollingInterval` to `pollingIntervalMs` + to match spec. + ([#6672](https://github.com/open-telemetry/opentelemetry-java/pull/6672)) + +#### Testing + +* Add asserts for log record body fields + ([#6509](https://github.com/open-telemetry/opentelemetry-java/pull/6509)) + +## Version 1.41.0 (2024-08-09) + +### API + +* Move experimental suppress instrumentation context key to api internal package + ([#6546](https://github.com/open-telemetry/opentelemetry-java/pull/6546)) + +#### Incubator + +* Fix bug in `ExtendedContextPropagators` preventing context extraction when case is incorrect. + ([#6569](https://github.com/open-telemetry/opentelemetry-java/pull/6569)) + +### SDK + +* Extend `CompletableResultCode` with `failExceptionally(Throwable)`. + ([#6348](https://github.com/open-telemetry/opentelemetry-java/pull/6348)) + +#### Metrics + +* Avoid allocations when experimental advice doesn't remove any attributes. + ([#6629](https://github.com/open-telemetry/opentelemetry-java/pull/6629)) + +#### Exporter + +* Enable retry by default for OTLP exporters. + ([#6588](https://github.com/open-telemetry/opentelemetry-java/pull/6588)) +* Retry ConnectException, add retry logging. + ([#6614](https://github.com/open-telemetry/opentelemetry-java/pull/6614)) +* Extend `PrometheusHttpServer` with ability to configure default aggregation as function of + instrument kind, including experimental env var support. + ([#6541](https://github.com/open-telemetry/opentelemetry-java/pull/6541)) +* Add exporter data model impl for profiling signal type. + ([#6498](https://github.com/open-telemetry/opentelemetry-java/pull/6498)) +* Add Marshalers for profiling signal type. + ([#6565](https://github.com/open-telemetry/opentelemetry-java/pull/6565)) +* Use generateCertificates() of CertificateFactory to process certificates. + ([#6579](https://github.com/open-telemetry/opentelemetry-java/pull/6579)) + +#### Extensions + +* Add file configuration ComponentProvider support for exporters. + ([#6493](https://github.com/open-telemetry/opentelemetry-java/pull/6493)) +* Remove nullable from file config Factory contract. 
+ ([#6612](https://github.com/open-telemetry/opentelemetry-java/pull/6612)) + +## Version 1.40.0 (2024-07-05) + +### API + +#### Incubator + +* Narrow ExtendedSpanBuilder return types for chaining + ([#6514](https://github.com/open-telemetry/opentelemetry-java/pull/6514)) +* Add APIs to determine if tracer, logger, instruments are enabled + ([#6502](https://github.com/open-telemetry/opentelemetry-java/pull/6502)) + +### SDK + +#### Extensions + +* Move autoconfigure docs to opentelemetry.io + ([#6491](https://github.com/open-telemetry/opentelemetry-java/pull/6491)) + +## Version 1.39.0 (2024-06-07) + +### API + +#### Incubator + +* BREAKING: Refactor ExtendedTracer, ExtendedSpanBuilder to reflect incubating API conventions + ([#6497](https://github.com/open-telemetry/opentelemetry-java/pull/6497)) + +### SDK + +#### Exporter + +* BREAKING: Serve prometheus metrics only on `/metrics` by default. To restore the previous behavior + and serve metrics on all paths, override the default handler + as [demonstrated here](https://github.com/open-telemetry/opentelemetry-java/blob/main/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServerTest.java#L251-L259). + ([#6476](https://github.com/open-telemetry/opentelemetry-java/pull/6476)) +* Make OTLP exporter memory mode API public + ([#6469](https://github.com/open-telemetry/opentelemetry-java/pull/6469)) +* Speed up OTLP string marshaling using sun.misc.Unsafe + ([#6433](https://github.com/open-telemetry/opentelemetry-java/pull/6433)) +* Add exporter data classes for experimental profiling signal type. + ([#6374](https://github.com/open-telemetry/opentelemetry-java/pull/6374)) +* Start prometheus http server with daemon thread + ([#6472](https://github.com/open-telemetry/opentelemetry-java/pull/6472)) +* Update the Prometheus metrics library and improve how units are included in metric names. + ([#6473](https://github.com/open-telemetry/opentelemetry-java/pull/6473)) +* Remove android animalsniffer check from prometheus exporter + ([#6478](https://github.com/open-telemetry/opentelemetry-java/pull/6478)) + +#### Extensions + +* Load file config YAML using core schema, ensure that env var substitution retains string types. + ([#6436](https://github.com/open-telemetry/opentelemetry-java/pull/6436)) +* Define dedicated file configuration SPI ComponentProvider + ([#6457](https://github.com/open-telemetry/opentelemetry-java/pull/6457)) + +### Tooling + +* Normalize timestamps and file ordering in jars, making the outputs reproducible + ([#6471](https://github.com/open-telemetry/opentelemetry-java/pull/6471)) +* GHA for generating the post-release pull request + ([#6449](https://github.com/open-telemetry/opentelemetry-java/pull/6449)) + +## Version 1.38.0 (2024-05-10) + +### API + +* Stabilize synchronous gauge + ([#6419](https://github.com/open-telemetry/opentelemetry-java/pull/6419)) + +#### Incubator + +* Add put(AttributeKey, T) overload to EventBuilder + ([#6331](https://github.com/open-telemetry/opentelemetry-java/pull/6331)) + +#### Baggage + +* Baggage filters space-only keys + ([#6431](https://github.com/open-telemetry/opentelemetry-java/pull/6431)) + +### SDK + +* Add experimental scope config to enable / disable scopes (i.e. 
meter, logger, tracer) + ([#6375](https://github.com/open-telemetry/opentelemetry-java/pull/6375)) + +#### Traces + +* Add ReadableSpan#getAttributes + ([#6382](https://github.com/open-telemetry/opentelemetry-java/pull/6382)) +* Use standard ArrayList size rather than max number of links for initial span links allocation + ([#6252](https://github.com/open-telemetry/opentelemetry-java/pull/6252)) + +#### Metrics + +* Use low precision Clock#now when computing timestamp for exemplars + ([#6417](https://github.com/open-telemetry/opentelemetry-java/pull/6417)) +* Update invalid instrument name log message now that forward slash `/` is valid + ([#6343](https://github.com/open-telemetry/opentelemetry-java/pull/6343)) + +#### Exporters + +* Introduce low allocation OTLP marshalers. If using autoconfigure, opt in + via `OTEL_JAVA_EXPERIMENTAL_EXPORTER_MEMORY_MODE=REUSABLE_DATA`. + * Low allocation OTLP logs marshaler + ([#6429](https://github.com/open-telemetry/opentelemetry-java/pull/6429)) + * Low allocation OTLP metrics marshaler + ([#6422](https://github.com/open-telemetry/opentelemetry-java/pull/6422)) + * Low allocation OTLP trace marshaler + ([#6410](https://github.com/open-telemetry/opentelemetry-java/pull/6410)) + * Add memory mode support to OTLP exporters + ([#6430](https://github.com/open-telemetry/opentelemetry-java/pull/6430)) + * Marshal span status description without allocation + ([#6423](https://github.com/open-telemetry/opentelemetry-java/pull/6423)) + * Add private constructors for stateless marshalers + ([#6434](https://github.com/open-telemetry/opentelemetry-java/pull/6434)) +* Mark opentelemetry-exporter-sender-jdk stable + ([#6357](https://github.com/open-telemetry/opentelemetry-java/pull/6357)) +* PrometheusHttpServer prevent concurrent reads when reusable memory mode + ([#6371](https://github.com/open-telemetry/opentelemetry-java/pull/6371)) +* Ignore TLS components (SSLContext, TrustManager, KeyManager) if plain HTTP protocol is used for + exporting + ([#6329](https://github.com/open-telemetry/opentelemetry-java/pull/6329)) +* Add is_remote_parent span flags to OTLP exported Spans and SpanLinks + ([#6388](https://github.com/open-telemetry/opentelemetry-java/pull/6388)) +* Add missing fields to OTLP metric exporters `toString()` + ([#6402](https://github.com/open-telemetry/opentelemetry-java/pull/6402)) + +#### Extensions + +* Rename otel.config.file to otel.experimental.config.file for autoconfigure + ([#6396](https://github.com/open-telemetry/opentelemetry-java/pull/6396)) + +### OpenCensus Shim + +* Fix opencensus shim spanBuilderWithRemoteParent behavior + ([#6415](https://github.com/open-telemetry/opentelemetry-java/pull/6415)) + +### Tooling + +* Add additional API incubator docs + ([#6356](https://github.com/open-telemetry/opentelemetry-java/pull/6356)) +* Run build on java 21 + ([#6370](https://github.com/open-telemetry/opentelemetry-java/pull/6370)) +* Fix running tests with java 8 on macos + ([#6411](https://github.com/open-telemetry/opentelemetry-java/pull/6411)) +* Move away from deprecated gradle enterprise APIs + ([#6363](https://github.com/open-telemetry/opentelemetry-java/pull/6363)) + +## Version 1.37.0 (2024-04-05) + +**NOTICE:** This release contains a significant restructuring of the experimental event API and the API incubator artifact. Please read the notes in the `API -> Incubator` section carefully. 
+ +### API + +* Promote `Span#addLink` to stable API + ([#6317](https://github.com/open-telemetry/opentelemetry-java/pull/6317)) + +#### Incubator + +* BREAKING: Rename `opentelemetry-extension-incubator` to `opentelemetry-api-incubator`, + merge `opentelemetry-api-events` into `opentelemetry-api-incubator`. + ([#6289](https://github.com/open-telemetry/opentelemetry-java/pull/6289)) +* BREAKING: Remove domain from event api. `EventEmitterProvider#setEventDomain` has been removed. + The `event.name` field should now be namespaced to avoid collisions. + See [Semantic Conventions for Event Attributes](https://opentelemetry.io/docs/specs/semconv/general/events/) + for more details. + ([#6253](https://github.com/open-telemetry/opentelemetry-java/pull/6253)) +* BREAKING: Rename `EventEmitter` and related classes to `EventLogger`. + ([#6316](https://github.com/open-telemetry/opentelemetry-java/pull/6316)) +* BREAKING: Refactor Event API to reflect spec changes. Restructure API to put fields in + the `AnyValue` log record body. Add setters for timestamp, context, and severity. Set default + severity to `INFO=9`. + ([#6318](https://github.com/open-telemetry/opentelemetry-java/pull/6318)) + +### SDK + +* Add `get{Signal}Exporter` methods to `Simple{Signal}Processor`, `Batch{Signal}Processor`. + ([#6078](https://github.com/open-telemetry/opentelemetry-java/pull/6078)) + +#### Metrics + +* Use synchronized instead of reentrant lock in explicit bucket histogram + ([#6309](https://github.com/open-telemetry/opentelemetry-java/pull/6309)) + +#### Exporters + +* Fix typo in OTLP javadoc + ([#6311](https://github.com/open-telemetry/opentelemetry-java/pull/6311)) +* Add `PrometheusHttpServer#toBuilder()` + ([#6333](https://github.com/open-telemetry/opentelemetry-java/pull/6333)) +* Bugfix: Use `getPrometheusName` for Otel2PrometheusConverter map keys to avoid metric name + conflicts + ([#6308](https://github.com/open-telemetry/opentelemetry-java/pull/6308)) + +#### Extensions + +* Add Metric exporter REUSABLE_DATA memory mode configuration options, including autoconfigure + support via env var `OTEL_JAVA_EXPERIMENTAL_EXPORTER_MEMORY_MODE=REUSABLE_DATA`. + ([#6304](https://github.com/open-telemetry/opentelemetry-java/pull/6304)) +* Add autoconfigure console alias for logging exporter + ([#6027](https://github.com/open-telemetry/opentelemetry-java/pull/6027)) +* Update jaeger autoconfigure docs to point to OTLP + ([#6307](https://github.com/open-telemetry/opentelemetry-java/pull/6307)) +* Add `ServiceInstanceIdResourceProvider` implementation for generating `service.instance.id` UUID + if not already provided by user. Included in `opentelemetry-sdk-extension-incubator`. 
+ ([#6226](https://github.com/open-telemetry/opentelemetry-java/pull/6226)) +* Add GCP resource detector to list of resource providers in autoconfigure docs + ([#6336](https://github.com/open-telemetry/opentelemetry-java/pull/6336)) + +### Tooling + +* Check for Java 17 toolchain and fail if not found + ([#6303](https://github.com/open-telemetry/opentelemetry-java/pull/6303)) + +## Version 1.36.0 (2024-03-08) + +### SDK + +#### Traces + +* Lazily initialize the container for events in the SDK Span implementation + ([#6244](https://github.com/open-telemetry/opentelemetry-java/pull/6244)) + +#### Exporters + +* Add basic proxy configuration to OtlpHttp{Signal}Exporters + ([#6270](https://github.com/open-telemetry/opentelemetry-java/pull/6270)) +* Add connectTimeout configuration option OtlpGrpc{Signal}Exporters + ([#6079](https://github.com/open-telemetry/opentelemetry-java/pull/6079)) + +#### Extensions + +* Add ComponentLoader to autoconfigure support more scenarios + ([#6217](https://github.com/open-telemetry/opentelemetry-java/pull/6217)) +* Added MetricReader customizer for AutoConfiguredOpenTelemetrySdkBuilder + ([#6231](https://github.com/open-telemetry/opentelemetry-java/pull/6231)) +* Return AutoConfiguredOpenTelemetrySdkBuilder instead of the base type + ([#6248](https://github.com/open-telemetry/opentelemetry-java/pull/6248)) + +### Tooling + +* Add note about draft PRs to CONTRIBUTING.md + ([#6247](https://github.com/open-telemetry/opentelemetry-java/pull/6247)) + +## Version 1.35.0 (2024-02-09) + +**NOTE:** The `opentelemetry-exporter-jaeger` and `opentelemetry-exporter-jaeger-thift` artifacts +have stopped being published. Jaeger +has [native support for OTLP](https://opentelemetry.io/blog/2022/jaeger-native-otlp/), and users +should export to jaeger +using OTLP +instead. + +### API + +#### Incubator + +* Add Span#addLink, for adding a link after span start + ([#6084](https://github.com/open-telemetry/opentelemetry-java/pull/6084)) + +### SDK + +#### Traces + +* Bugfix: Ensure span status cannot be updated after set to StatusCode.OK + ([#6209](https://github.com/open-telemetry/opentelemetry-java/pull/6209) + +#### Metrics + +* Reusable memory Mode: Adding support for exponential histogram aggregation + ([#6058](https://github.com/open-telemetry/opentelemetry-java/pull/6058), + [#6136](https://github.com/open-telemetry/opentelemetry-java/pull/6136)) +* Reusable memory mode: Adding support for explicit histogram aggregation + ([#6153](https://github.com/open-telemetry/opentelemetry-java/pull/6153)) +* Reusable memory mode: Adding support for sum aggregation + ([#6182](https://github.com/open-telemetry/opentelemetry-java/pull/6182)) +* Reusable memory mode: Adding support for last value aggregation + ([#6196](https://github.com/open-telemetry/opentelemetry-java/pull/6196)) + +#### Exporters + +* Recreate / fix graal issue detecting RetryPolicy class + ([#6139](https://github.com/open-telemetry/opentelemetry-java/pull/6139), + [#6134](https://github.com/open-telemetry/opentelemetry-java/pull/6134)) +* Restore prometheus metric name mapper tests, fix regressions + ([#6138](https://github.com/open-telemetry/opentelemetry-java/pull/6138)) +* WARNING: Remove jaeger exporters + ([#6119](https://github.com/open-telemetry/opentelemetry-java/pull/6119)) +* Update dependency `io.zipkin.reporter2:zipkin-reporter-bom` to 3.2.1. 
+ Note: `ZipkinSpanExporterBuilder#setEncoder(zipkin2.codec.BytesEncoder)` has been deprecated in + favor of `ZipkinSpanExporterBuilder#setEncoder(zipkin2.reporter.BytesEncoder)`. + `ZipkinSpanExporterBuilder#setSender(zipkin2.reporter.Sender)` has been deprecated in favor + of `ZipkinSpanExporterBuilder#setSender(zipkin2.reporter.BytesMessageSender)`. + ([#6129](https://github.com/open-telemetry/opentelemetry-java/pull/6129), + [#6151](https://github.com/open-telemetry/opentelemetry-java/pull/6151)) +* Include trace flags in otlp marshaller + ([#6167](https://github.com/open-telemetry/opentelemetry-java/pull/6167)) +* Add Compressor SPI support to OtlpGrpc{Signal}Exporters + ([#6103](https://github.com/open-telemetry/opentelemetry-java/pull/6103)) +* Allow Prometheus exporter to add resource attributes to metric attributes + ([#6179](https://github.com/open-telemetry/opentelemetry-java/pull/6179)) + +#### Extension + +* Autoconfigure accepts encoded header values for OTLP exporters + ([#6164](https://github.com/open-telemetry/opentelemetry-java/pull/6164)) +* Return implementation type from `AutoConfiguredOpenTelemetrySdkBuilder.addLogRecordProcessorCustomizer` + ([#6248](https://github.com/open-telemetry/opentelemetry-java/pull/6248)) + +#### Incubator + +* Align file configuration with latest changes to spec + ([#6088](https://github.com/open-telemetry/opentelemetry-java/pull/6088)) + +### Tooling + +* Stop including old artifacts in bom + ([#6157](https://github.com/open-telemetry/opentelemetry-java/pull/6157)) +* Define CODECOV token + ([#6186](https://github.com/open-telemetry/opentelemetry-java/pull/6186)) + +## Version 1.34.1 (2024-01-11) + +* Fix prometheus exporter regressions + ([#6138](https://github.com/open-telemetry/opentelemetry-java/pull/6138)) +* Fix native image regression + ([#6134](https://github.com/open-telemetry/opentelemetry-java/pull/6134)) + +## Version 1.34.0 (2024-01-05) + +**NOTE:** This is the LAST release for `opentelemetry-exporter-jaeger` +and `opentelemetry-exporter-jaeger-thift`. Jaeger +has [native support for OTLP](https://opentelemetry.io/blog/2022/jaeger-native-otlp/), and users +should export to jaeger +using OTLP +instead. 
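
For users moving off the Jaeger exporters, a minimal sketch of exporting spans to Jaeger over OTLP might look like the following (the gRPC variant and the `http://localhost:4317` endpoint are assumptions; adjust them to your Jaeger or collector deployment):

```java
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.export.BatchSpanProcessor;

public final class JaegerOverOtlp {
  private JaegerOverOtlp() {}

  /** Builds a tracer provider that ships spans to a Jaeger backend's OTLP/gRPC endpoint. */
  public static SdkTracerProvider createTracerProvider() {
    OtlpGrpcSpanExporter exporter =
        OtlpGrpcSpanExporter.builder()
            .setEndpoint("http://localhost:4317") // assumed default OTLP/gRPC port
            .build();
    return SdkTracerProvider.builder()
        .addSpanProcessor(BatchSpanProcessor.builder(exporter).build())
        .build();
  }
}
```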
+ +### API + +* Ability to access version.properties API file with GraalVM native + ([#6095](https://github.com/open-telemetry/opentelemetry-java/pull/6095)) + +### SDK + +#### Traces + +* Only call SpanProcessor onStart / onEnd if required + ([#6112](https://github.com/open-telemetry/opentelemetry-java/pull/6112)) +* Add option to export unsampled spans from span processors + ([#6057](https://github.com/open-telemetry/opentelemetry-java/pull/6057)) + +#### Metrics + +* Memory Mode: Adding first part support for synchronous instruments - storage + ([#5998](https://github.com/open-telemetry/opentelemetry-java/pull/5998)) +* Base2ExponentialHistogramAggregation maxBuckets must be >= 2 + ([#6093](https://github.com/open-telemetry/opentelemetry-java/pull/6093)) +* Convert histogram measurements to double before passing recording exemplar reservoir + ([#6024](https://github.com/open-telemetry/opentelemetry-java/pull/6024)) + +#### Exporters + +* Add compressor SPI to support additional compression algos + ([#5990](https://github.com/open-telemetry/opentelemetry-java/pull/5990)) +* Test OTLP exporters with different OkHttp versions + ([#6045](https://github.com/open-telemetry/opentelemetry-java/pull/6045)) +* Refactor prometheus exporter to use `io.prometheus:prometheus-metrics-exporter-httpserver`, add + exponential Histogram support + ([#6015](https://github.com/open-telemetry/opentelemetry-java/pull/6015)) +* UpstreamGrpcSenderProvider uses minimal fallback managed channel when none is specified + ([#6110](https://github.com/open-telemetry/opentelemetry-java/pull/6110)) +* OTLP exporters propagate serialization IOException instead of rethrowing as runtime + ([#6082](https://github.com/open-telemetry/opentelemetry-java/pull/6082)) + +#### Extensions + +* Autoconfigure reads normalized otel.config.file property + ([#6105](https://github.com/open-telemetry/opentelemetry-java/pull/6105)) + +## Version 1.33.0 (2023-12-08) + +### API + +* Fix issue where wrapping "invalid" SpanContexts in Span does not preserve SpanContext + ([#6044](https://github.com/open-telemetry/opentelemetry-java/pull/6044)) + +#### Incubator + +* Refactor and add to ExtendedTracer, add ExtendedContextPropagators + ([#6017](https://github.com/open-telemetry/opentelemetry-java/pull/6017)) +* Base64 encode AnyValue bytes in string representation + ([#6003](https://github.com/open-telemetry/opentelemetry-java/pull/6003)) + +### SDK + +#### Exporters + +* Add connectTimeout configuration option OtlpHttp{Signal}Exporters + ([#5941](https://github.com/open-telemetry/opentelemetry-java/pull/5941)) +* Add ability for Otlp{Protocol}LogRecordExporter to serialize log body any value + ([#5938](https://github.com/open-telemetry/opentelemetry-java/pull/5938)) +* Android environments can now handle base64 encoded PEM keys, remove exception handling in + TlsUtil#decodePem + ([#6034](https://github.com/open-telemetry/opentelemetry-java/pull/6034)) +* Add header supplier configuration option to OTLP exporters + ([#6004](https://github.com/open-telemetry/opentelemetry-java/pull/6004)) + + +#### Extensions + +* Add autoconfigure option for customizing SpanProcessor, LogRecordProcessor + ([#5986](https://github.com/open-telemetry/opentelemetry-java/pull/5986)) +* Incubator allows for simpler creation of start-only and end-only SpanProcessors. 
+ ([#5923](https://github.com/open-telemetry/opentelemetry-java/pull/5923)) + +#### Testing + +* Add hasAttributesSatisfying overload to AbstractPointAssert + ([#6048](https://github.com/open-telemetry/opentelemetry-java/pull/6048)) + +### Project Tooling + +* Building animal sniffer signatures directly from android corelib + ([#5973](https://github.com/open-telemetry/opentelemetry-java/pull/5973)) +* Target kotlin 1.6 in kotlin extension + ([#5910](https://github.com/open-telemetry/opentelemetry-java/pull/5910)) +* Define language version compatibility requirements + ([#5983](https://github.com/open-telemetry/opentelemetry-java/pull/5983)) + +## Version 1.32.0 (2023-11-13) + +### API + +* Stabilize explicit bucket boundaries advice API + ([#5897](https://github.com/open-telemetry/opentelemetry-java/pull/5897)) +* Allow events to be emitted with timestamp + ([#5928](https://github.com/open-telemetry/opentelemetry-java/pull/5928)) + +#### Context + +* Add null check to StrictContextStorage + ([#5954](https://github.com/open-telemetry/opentelemetry-java/pull/5954)) + +#### Incubator + +* Experimental support for Log AnyValue body + ([#5880](https://github.com/open-telemetry/opentelemetry-java/pull/5880)) + +### SDK + +#### Metrics + +* Dismantle AbstractInstrumentBuilder inheritance hierarchy + ([#5820](https://github.com/open-telemetry/opentelemetry-java/pull/5820)) +* Fix delta metric storage concurrency bug that allows for lost writes when record operations occur + during collection. The fix introduces additional work on record threads to ensure correctness. The + additional overhead is non-blocking and should be small according to performance testing. Still, + there may be an opportunity for further optimization. + ([#5932](https://github.com/open-telemetry/opentelemetry-java/pull/5932), + [#5976](https://github.com/open-telemetry/opentelemetry-java/pull/5976)) + + +#### Exporters + +* Prometheus exporter: omit empty otel_scope_info and otel_target_info metrics + ([#5887](https://github.com/open-telemetry/opentelemetry-java/pull/5887)) +* JdkHttpSender should retry on connect exceptions + ([#5867](https://github.com/open-telemetry/opentelemetry-java/pull/5867)) +* Expand the set of retryable exceptions in JdkHttpSender + ([#5942](https://github.com/open-telemetry/opentelemetry-java/pull/5942)) +* Identify OTLP export calls with context key used for instrumentation suppression + ([#5918](https://github.com/open-telemetry/opentelemetry-java/pull/5918)) + +#### Testing + +* Add log support to junit extensions + ([#5966](https://github.com/open-telemetry/opentelemetry-java/pull/5966)) + +#### SDK Extensions + +* Add file configuration to autoconfigure + ([#5831](https://github.com/open-telemetry/opentelemetry-java/pull/5831)) +* Update to file configuration to use opentelemetry-configuration v0.1.0 + ([#5899](https://github.com/open-telemetry/opentelemetry-java/pull/5899)) +* Add env var substitution support to file configuration + ([#5914](https://github.com/open-telemetry/opentelemetry-java/pull/5914)) +* Stop setting Resource schemaUrl in autoconfigure + ([#5911](https://github.com/open-telemetry/opentelemetry-java/pull/5911)) +* Add AutoConfigureListener to provide components with autoconfigured SDK + ([#5931](https://github.com/open-telemetry/opentelemetry-java/pull/5931)) + +### OpenCensus Shim + +* Clean up OpenCensus shim + ([#5858](https://github.com/open-telemetry/opentelemetry-java/pull/5858)) + +### OpenTracing Shim + +* Fix OpenTracing header name issue + 
([#5840](https://github.com/open-telemetry/opentelemetry-java/pull/5840)) + +## Version 1.31.0 (2023-10-06) + +### API + +#### Incubator + +* Refactor advice API to simplify usage + ([#5848](https://github.com/open-telemetry/opentelemetry-java/pull/5848)) + +### SDK + +* BatchLogRecordProcessor and BatchSpanProcessor unify `queueSize` metric + description and attribute name for `processorType` + ([#5836](https://github.com/open-telemetry/opentelemetry-java/pull/5836)) + +#### Metrics + +* Allow instrument names to contain a forward slash + ([#5824](https://github.com/open-telemetry/opentelemetry-java/pull/5824)) +* Memory Mode support: Adding memory mode, and implementing it for Asynchronous Instruments + ([#5709](https://github.com/open-telemetry/opentelemetry-java/pull/5709), + [#5855](https://github.com/open-telemetry/opentelemetry-java/pull/5855)) +* Stabilize MetricProducer, allow custom MetricReaders + ([#5835](https://github.com/open-telemetry/opentelemetry-java/pull/5835)) +* Drop NaN measurements to metric instruments + ([#5859](https://github.com/open-telemetry/opentelemetry-java/pull/5859)) +* Fix flaky MetricExporterConfigurationTest + ([#5877](https://github.com/open-telemetry/opentelemetry-java/pull/5877)) + +#### Logs + +* Add addAllAttributes() to ReadWriteLogRecord. + ([#5825](https://github.com/open-telemetry/opentelemetry-java/pull/5825)) + +#### Exporters + +* Prometheus exporter: handle colliding metric attribute keys + ([#5717](https://github.com/open-telemetry/opentelemetry-java/pull/5717)) + +#### SDK Extensions + +* File configuration ConfigurationReader handles null values as empty + ([#5829](https://github.com/open-telemetry/opentelemetry-java/pull/5829)) + +#### Semantic conventions + +* BREAKING: Stop publishing `io.opentelemetry:opentelemetry-semconv`. Please use + `io.opentelemetry.semconv:opentelemetry-semconv:1.21.0-alpha` instead, which is published + from [open-telemetry/semantic-conventions-java](https://github.com/open-telemetry/semantic-conventions-java). + The new repository is published in lockstep + with [open-telemetry/semantic-conventions](https://github.com/open-telemetry/semantic-conventions). + ([#5807](https://github.com/open-telemetry/opentelemetry-java/pull/5807)) + +### Project Tooling + +* Add Benchmark workflows + ([#5842](https://github.com/open-telemetry/opentelemetry-java/pull/5842), + [#5874](https://github.com/open-telemetry/opentelemetry-java/pull/5874)) +* Add clearer docs around coroutine support with an example + ([#5799](https://github.com/open-telemetry/opentelemetry-java/pull/5799)) + +## Version 1.30.1 (2023-09-11) + +* Fix autoconfigure bug creating multiple `PrometheusHttpServer` instances with same port + ([#5811](https://github.com/open-telemetry/opentelemetry-java/pull/5811)) + +## Version 1.30.0 (2023-09-08) ### API @@ -204,7 +1208,7 @@ The log bridge API / SDK are now stable! Some important notes: of `otel.logs.exporter` from `none` to `otlp`. NOTE: reminder that -the [Logs Bridge API](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/logs/bridge-api.md) +the [Logs Bridge API](https://github.com/open-telemetry/opentelemetry-specification/blob/v1.21.0/specification/logs/bridge-api.md) is _not_ meant for end users. Log appenders use the API to bridge logs from existing log frameworks (e.g. JUL, Log4j, SLf4J, Logback) into OpenTelemetry. Users configure the Log SDK to dictate how logs are processed and exported. 
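
As a rough illustration of that last point, a Log SDK setup might be wired up as follows (the batch processor and the OTLP/gRPC log exporter are illustrative choices here, not a recommendation):

```java
import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter;
import io.opentelemetry.sdk.logs.SdkLoggerProvider;
import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor;

public final class LogSdkExample {
  private LogSdkExample() {}

  /** Builds a logger provider that batches log records and exports them over OTLP/gRPC. */
  public static SdkLoggerProvider createLoggerProvider() {
    OtlpGrpcLogRecordExporter exporter = OtlpGrpcLogRecordExporter.builder().build();
    return SdkLoggerProvider.builder()
        .addLogRecordProcessor(BatchLogRecordProcessor.builder(exporter).build())
        .build();
  }
}
```
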
@@ -272,7 +1276,7 @@ merged into `opentelemetry-exporter-otlp`, `opentelemetry-sdk-logs-testing` will into `opentelemetry-sdk-testing`, `opentelemetry-sdk-extension-autoconfigure` will enable `otlp` log exporter by default (i.e. `otel.logs.exporter=otlp`). For more details, see tracking issue [#5340](https://github.com/open-telemetry/opentelemetry-java/issues/5340). NOTE: reminder that -the [Logs Bridge API](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/logs/bridge-api.md) +the [Logs Bridge API](https://github.com/open-telemetry/opentelemetry-specification/blob/v1.21.0/specification/logs/bridge-api.md) is _not_ meant for end users. Log appenders use the API to bridge logs from existing log frameworks (e.g. JUL, Log4j, SLf4J, Logback) into OpenTelemetry. Users configure the Log SDK to dictate how logs are processed and exported. @@ -834,8 +1838,8 @@ log API component has been added for emitting events and for writing log appende API is not a substitute for traditional log frameworks like Log4j, JUL, SLF4J, or Logback. While the event portion of the API is intended for instrumentation authors and end users, the API for emitting LogRecords is not. -See [LoggerProvider](./api/logs/src/main/java/io/opentelemetry/api/logs/LoggerProvider.java) -and [Logger](./api/logs/src/main/java/io/opentelemetry/api/logs/Logger.java) javadoc for more +See [LoggerProvider](./api/all/src/main/java/io/opentelemetry/api/logs/LoggerProvider.java) +and [Logger](./api/all/src/main/java/io/opentelemetry/api/logs/Logger.java) javadoc for more details. ### General @@ -1121,7 +2125,7 @@ details. stable `opentelemetry-sdk-extension-autoconfigure-spi`. * Autoconfigure now supports multiple values for `otel.metrics.exporter`. * Autoconfigure now - supports [general attribute limits](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/sdk-environment-variables.md#attribute-limits), + supports [general attribute limits](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/sdk-environment-variables.md#attribute-limits), applicable to span attributes, span event attributes, span link attributes, and log attributes. * Autoconfigure now supports an experimental option to disable the SDK. If `otel.experimental.sdk.enabled=true`, `AutoConfiguredOpenTelemetrySdk#getOpenTelemetrySdk()` @@ -1371,7 +2375,7 @@ should not be many. Thanks for bearing with us on this. ### General * Examples moved - to [opentelemetry-java-docs](https://github.com/open-telemetry/opentelemetry-java-docs) + to [opentelemetry-java-examples](https://github.com/open-telemetry/opentelemetry-java-examples) ### SDK @@ -2681,7 +3685,7 @@ See the `opentelemetry-extension-kotlin` module for details. #### Breaking changes -- There have been many updates to the semantic conventions constants. The constants are now auto-generated from the YAML specification files, so the names will now be consistent across languages. For more information, see the [YAML Model for Semantic Conventions](https://github.com/open-telemetry/opentelemetry-specification/tree/master/semantic_conventions). +- There have been many updates to the semantic conventions constants. The constants are now auto-generated from the YAML specification files, so the names will now be consistent across languages. For more information, see the [YAML Model for Semantic Conventions](https://github.com/open-telemetry/semantic-conventions/tree/main/model#yaml-model-for-semantic-conventions). 
- All API classes have been moved into the `io.opentelemetry.api.` prefix to support JPMS users. - The API no longer uses the `grpc-context` as the context implementation. It now uses `io.opentelemetry.context.Context`. This is published in the `opentelemetry-context` artifact. Interactions with the context were mostly moved to static methods in the `Span` and `Baggage` interfaces. - The Baggage API has been reworked to more closely match the specification. This includes the removal of the `BaggageManager`. Baggage is fully functional within the API, without needing to install an SDK. @@ -2712,7 +3716,7 @@ See the `opentelemetry-extension-kotlin` module for details. #### Breaking changes -- `TraceConfig` configuration option names (environment variables and system properties) were renamed to match the OpenTelemetery Specification. For more information, see [TraceConfig](./QUICKSTART.md#TraceConfig). +- `TraceConfig` configuration option names (environment variables and system properties) were renamed to match the OpenTelemetery Specification. - The Jaeger gRPC exporter was updated to match the OpenTelemetry Specification. The `message` log entry attribute has been renamed to `event` and a new `dropped attributes count` attribute was added. For more information, see the [Overview](https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/overview.md). - The `SpanData.getHasRemoteParent()` and `SpanData.getHasEnded()` methods were renamed to `hasRemoteParent()` and `hasEnded()`, respectively. - The `IdsGenerator` interface has been renamed to `IdGenerator`, and all implementations and relevant factory methods were similarly renamed. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8171d76655b..5c02e405c0d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,12 +3,12 @@ Welcome to OpenTelemetry Java repository! Before you start - see OpenTelemetry general -[contributing](https://github.com/open-telemetry/community/blob/main/CONTRIBUTING.md) +[contributing](https://github.com/open-telemetry/community/blob/main/guides/contributor/README.md) requirements and recommendations. If you want to add new features or change behavior, please make sure your changes follow the [OpenTelemetry Specification](https://github.com/open-telemetry/opentelemetry-specification). -Otherwise file an issue or submit a PR to the specification repo first. +Otherwise, file an issue or submit a pull request (PR) to the specification repo first. Make sure to review the projects [license](LICENSE) and sign the [CNCF CLA](https://identity.linuxfoundation.org/projects/cncf). A signed CLA will be enforced by an @@ -52,7 +52,8 @@ $ ./gradlew check Note: this gradle task will potentially generate changes to files in the `docs/apidiffs/current_vs_latest` -directory. Please make sure to include any changes to these files in your pull request. +directory. Please make sure to include any changes to these files in your pull request (i.e. +add those files to your commits in the PR). ## PR Review @@ -60,6 +61,14 @@ After you submit a PR, it will be reviewed by the project maintainers and approv maintainers need to review a particular PR, but merging to the base branch is authorized to restricted members (administrators). +### Draft PRs + +Draft PRs are welcome, especially when exploring new ideas or experimenting with a hypothesis. +However, draft PRs may not receive the same degree of attention, feedback, or scrutiny unless +requested directly. 
In order to help keep the PR backlog maintainable, drafts older than 6 months +will be closed by the project maintainers. This should not be interpreted as a rejection. Closed +PRs may be reopened by the author when time or interest allows. + ## Project Scope `opentelemetry-java` is one of several repositories which comprise the OpenTelemetry Java ecosystem, @@ -69,15 +78,13 @@ which implement concepts defined in the [opentelemetry-specification](https://github.com/open-telemetry/opentelemetry-specification), with a few exceptions / comments: -* The [API incubator](./extensions/incubator) and [SDK incubator](./extensions/incubator) +* The [API incubator](./api/incubator) and [SDK incubator](./sdk-extensions/incubator) contain prototypes which have been discussed in the specification or [oteps](https://github.com/open-telemetry/oteps) and have a reasonable chance of becoming part of the specification, subject to maintainers' discretion. * Components like the [Kotlin Extension](./extensions/kotlin) are included which are required for the API / SDK to function in key areas of the Java ecosystem. Inclusion is subject to maintainers' discretion. -* The [semconv](./semconv) module contains generated classes representing - the [semantic conventions](https://github.com/open-telemetry/semantic-conventions). * As a general rule, components which implement semantic conventions belong elsewhere. Other repositories in the OpenTelemetry Java ecosystem include: @@ -86,7 +93,7 @@ Other repositories in the OpenTelemetry Java ecosystem include: contains instrumentation. * [opentelemetry-java-contrib](https://github.com/open-telemetry/opentelemetry-java-contrib) contains extensions, prototypes, and instrumentation, including vendor specific components. -* [opentelemetry-java-docs](https://github.com/open-telemetry/opentelemetry-java-docs) contains +* [opentelemetry-java-examples](https://github.com/open-telemetry/opentelemetry-java-examples) contains working code snippets demonstrating various concepts. ## Style guideline @@ -131,6 +138,21 @@ uses [google-java-format](https://github.com/google/google-java-format) library: * Adding `toString()` overrides on classes is encouraged, but we only use `toString()` to provide debugging assistance. The implementations of all `toString()` methods should be considered to be unstable unless explicitly documented otherwise. +* Avoid synchronizing using a class's intrinsic lock. Instead, synchronize on a dedicated lock object. E.g: + ```java + private final Object lock = new Object(); + + public void doSomething() { + synchronized (lock) { ... } + } + ``` +* Don't + use [gradle test fixtures](https://docs.gradle.org/current/userguide/java_testing.html#sec:java_test_fixtures) ( + i.e. `java-test-fixtures` plugin) to reuse code for internal testing. The test fixtures plugin has + side effects where test dependencies are added to the `pom.xml` and publishes an + extra `*-test-fixtures.jar` artifact which is unnecessary for internal testing. Instead, create a + new `*:testing-internal` module and omit the `otel.java-conventions`. For example, + see [/exporters/otlp/testing-internal](./exporters/otlp/testing-internal). If you notice any practice being applied in the project consistently that isn't listed here, please consider a pull request to add it. @@ -168,6 +190,11 @@ in the guide for exceptions to the Javadoc requirement. 
* Our javadoc is available via [ javadoc.io}(https://javadoc.io/doc/io.opentelemetry/opentelemetry-api) +### SDK Configuration Documentation + +All changes to the SDK configuration options or autoconfigure module should be documented on +[opentelemetry.io](https://opentelemetry.io/docs/languages/java/configuration/). + ### AutoValue * Use [AutoValue](https://github.com/google/auto/tree/master/value), when possible, for any new @@ -230,13 +257,18 @@ Example usage could be as follows: } ``` + Please confirm whether the local opentelemetry-java version is consistent with the + opentelemetry-java version declared in the project that relies on opentelemetry-java. + If it is inconsistent, `dependencySubstitution` may not take effect. + See [the Gradle documentation](https://docs.gradle.org/current/userguide/composite_builds.html#included_build_declaring_substitutions) for more information. - 4. If you now build your project, it will use the included build to supply the opentelemetry-java artifacts, ignoring any version declarations. Use the prefix `:DIRECTORY:` to refer to tasks/projects within the included build, where DIRECTORY is the name of the directory in the included build (only the part after the last `/`). +5. Here are some issues and solutions ([discussions/6551](https://github.com/open-telemetry/opentelemetry-java/discussions/6551)) + you may encounter that may be helpful to you. ### Updating the OTLP protobufs diff --git a/README.md b/README.md index e912e7ab4ce..9178908bdcb 100644 --- a/README.md +++ b/README.md @@ -1,169 +1,178 @@ # OpenTelemetry Java + [![Continuous Build][ci-image]][ci-url] [![Coverage Status][codecov-image]][codecov-url] [![Maven Central][maven-image]][maven-url] +[![Reproducible Builds](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/jvm-repo-rebuild/reproducible-central/master/content/io/opentelemetry/java/badge.json)](https://github.com/jvm-repo-rebuild/reproducible-central/blob/master/content/io/opentelemetry/java/README.md) +[![OpenSSF Scorecard](https://api.scorecard.dev/projects/github.com/open-telemetry/opentelemetry-java/badge)](https://scorecard.dev/viewer/?uri=github.com/open-telemetry/opentelemetry-java) +[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9991/badge)](https://www.bestpractices.dev/projects/9991) -## Project Status +`opentelemetry-java` is the home of the Java implementation of the OpenTelemetry API for recording +telemetry, and SDK for managing telemetry recorded by the API. -See [Java status on OpenTelemetry.io][otel-java-status]. +See [opentelemetry.io Java Documentation](https://opentelemetry.io/docs/languages/java/intro/) for: -## Getting Started +* An overview of the OpenTelemetry Java ecosystem and key repositories +* Detailed documentation on the components published from this repository +* Review of instrumentation ecosystem, including OpenTelemetry Java agent +* End-to-end working code examples +* And more -If you are looking for an all-in-one, easy-to-install **auto-instrumentation -javaagent**, see [opentelemetry-java-instrumentation][]. +> [!IMPORTANT] +> We are currently seeking additional contributors! See [help wanted](#help-wanted) for details. -If you are looking for **examples** on how to use the OpenTelemetry API to -write your own **manual instrumentation**, or how to set up the OpenTelemetry -Java SDK, see [Manual instrumentation][]. Fully-functional examples -are available in [opentelemetry-java-docs][]. 
+## Requirements -For a general overview of OpenTelemetry, visit [opentelemetry.io][]. +Unless otherwise noted, all published artifacts support Java 8 or higher. +See [language version compatibility](VERSIONING.md#language-version-compatibility) for complete +details. -Would you like to get involved with the project? Read our [contributing guide](CONTRIBUTING.md). We welcome -contributions! +**Android Disclaimer:** For compatibility +reasons, [library desugaring](https://developer.android.com/studio/write/java8-support#library-desugaring) +must be enabled. -## Contacting us +See [contributing](#contributing) for details on building this project locally. -We hold regular meetings. See details at [community page](https://github.com/open-telemetry/community#java-sdk). +## Releases -We use [GitHub Discussions](https://github.com/open-telemetry/opentelemetry-java/discussions) -for support or general questions. Feel free to drop us a line. +Releases are published to maven central. We +publish [minor releases monthly](RELEASING.md#release-cadence) +and [patch releases as needed](RELEASING.md#preparing-a-new-patch-release). -We are also present in the [`#otel-java`](https://cloud-native.slack.com/archives/C014L2KCTE3) channel in the [CNCF slack](https://slack.cncf.io/). -Please join us for more informal discussions. +See [releases](https://github.com/open-telemetry/opentelemetry-java/releases) for a listing of +released versions and notes (see also [changelog](CHANGELOG.md)). -## Overview +## Artifacts -OpenTelemetry is the merging of OpenCensus and OpenTracing into a single project. +The artifacts published by this repository are summarized below in tables, organized in collapsible +sections by topic. -This project contains the following top level components: +As discussed in [compatibility](#compatibility), artifact versions must be kept in sync, for which +we strongly recommend [using one of our BOMs][dependencies-and-boms]. -* [OpenTelemetry API](api/): - * [stable apis](api/all/src/main/java/io/opentelemetry/api/) including `Tracer`, `Span`, `SpanContext`, `Meter`, and `Baggage` - * [semantic conventions](semconv/) Generated code for the OpenTelemetry semantic conventions. - * [context api](context/src/main/java/io/opentelemetry/context/) The OpenTelemetry Context implementation. -* [extensions](extensions/) define additional API extensions, which are not part of the core API. -* [sdk](sdk/) defines the implementation of the OpenTelemetry API. -* [sdk-extensions](sdk-extensions/) defines additional SDK extensions, which are not part of the core SDK. -* [OpenTracing shim](opentracing-shim/) defines a bridge layer from OpenTracing to the OpenTelemetry API. -* [OpenCensus shim](opencensus-shim/) defines a bridge layer from OpenCensus to the OpenTelemetry API. +
+ Bill of Materials (BOMs) -This project publishes a lot of artifacts, listed in [releases](#releases). -[`opentelemetry-bom`](https://mvnrepository.com/artifact/io.opentelemetry/opentelemetry-bom) (BOM = -Bill of Materials) is provided to assist with synchronizing versions of -dependencies. [`opentelemetry-bom-alpha`](https://mvnrepository.com/artifact/io.opentelemetry/opentelemetry-bom-alpha) -provides the same function for unstable artifacts. See [published releases](#published-releases) for -instructions on using the BOMs. +A bill of materials (or BOM) helps sync dependency versions of related artifacts. -We would love to hear from the larger community: please provide feedback proactively. +| Component | Description | Artifact ID | Version | Javadoc | +|----------------------------------------------|----------------------------------------|---------------------------|-------------------------------------------------------------|---------| +| [Bill of Materials (BOM)](./bom) | Bill of materials for stable artifacts | `opentelemetry-bom` | 1.48.0 | N/A | +| [Alpha Bill of Materials (BOM)](./bom-alpha) | Bill of materials for alpha artifacts | `opentelemetry-bom-alpha` | 1.48.0-alpha | N/A | +
-## Requirements +
+ API -Unless otherwise noted, all published artifacts support Java 8 or higher. +The OpenTelemetry API for recording telemetry. -**Android Disclaimer:** For compatibility reasons, [library desugaring](https://developer.android.com/studio/write/java8-support#library-desugaring) must be enabled. +| Component | Description | Artifact ID | Version | Javadoc | +|-----------------------------------|--------------------------------------------------------------------------------------|-------------------------------|-------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [API](./api/all) | OpenTelemetry API, including metrics, traces, baggage, context | `opentelemetry-api` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-api.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-api) | +| [API Incubator](./api/incubator) | API incubator, including pass through propagator, and extended tracer, and Event API | `opentelemetry-api-incubator` | 1.48.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-api-incubator.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-api-incubator) | +| [Context API](./context) | OpenTelemetry context API | `opentelemetry-context` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-context.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-context) | +
-See [CONTRIBUTING.md](./CONTRIBUTING.md) for additional instructions for building this project for development. +
+ API Extensions -### Note about extensions +Extensions to the OpenTelemetry API. -Both API and SDK extensions consist of various additional components which are excluded from the core artifacts -to keep them from growing too large. +| Component | Description | Artifact ID | Version | Javadoc | +|---------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------|-------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [Kotlin Extension](./extensions/kotlin) | Context extension for coroutines | `opentelemetry-extension-kotlin` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-extension-kotlin.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-extension-kotlin) | +| [Trace Propagators Extension](./extensions/trace-propagators) | Trace propagators, including B3, Jaeger, OT Trace | `opentelemetry-extension-trace-propagators` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-extension-trace-propagators.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-extension-trace-propagators) | +
-We still aim to provide the same level of quality and guarantee for them as for the core components. -Please don't hesitate to use them if you find them useful. +
+ SDK -## Project setup and contributing +The OpenTelemetry SDK for managing telemetry producing by the API. -Please refer to the [contribution guide](CONTRIBUTING.md) on how to set up for development and contribute! +| Component | Description | Artifact ID | Version | Javadoc | +|------------------------------|--------------------------------------------------------|-----------------------------|---------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [SDK](./sdk/all) | OpenTelemetry SDK, including metrics, traces, and logs | `opentelemetry-sdk` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk) | +| [Metrics SDK](./sdk/metrics) | OpenTelemetry metrics SDK | `opentelemetry-sdk-metrics` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-metrics.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-metrics) | +| [Trace SDK](./sdk/trace) | OpenTelemetry trace SDK | `opentelemetry-sdk-trace` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-trace.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-trace) | +| [Log SDK](./sdk/logs) | OpenTelemetry log SDK | `opentelemetry-sdk-logs` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-logs.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-logs) | +| [SDK Common](./sdk/common) | Shared SDK components | `opentelemetry-sdk-common` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-common.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-common) | +| [SDK Testing](./sdk/testing) | Components for testing OpenTelemetry instrumentation | `opentelemetry-sdk-testing` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-testing.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-testing) | +
-## Published Releases +
+ SDK Exporters -Published releases are available on maven central. We strongly recommend using our published BOM to keep all -dependency versions in sync. +SDK exporters for shipping traces, metrics, and logs out of process. -### Maven +| Component | Description | Artifact ID | Version | Javadoc | +|-----------------------------------------------------------------------|------------------------------------------------------------------------------|------------------------------------------------------|-------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [OTLP Exporters](./exporters/otlp/all) | OTLP gRPC & HTTP exporters, including traces, metrics, and logs | `opentelemetry-exporter-otlp` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-otlp.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-otlp) | +| [OTLP Logging Exporters](./exporters/logging-otlp) | Logging exporters in OTLP JSON encoding, including traces, metrics, and logs | `opentelemetry-exporter-logging-otlp` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-logging-otlp.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-logging-otlp) | +| [OTLP Common](./exporters/otlp/common) | Shared OTLP components (internal) | `opentelemetry-exporter-otlp-common` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-otlp-common.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-otlp-common) | +| [Logging Exporter](./exporters/logging) | Logging exporters, including metrics, traces, and logs | `opentelemetry-exporter-logging` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-logging.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-logging) | +| [Zipkin Exporter](./exporters/zipkin) | Zipkin trace exporter | `opentelemetry-exporter-zipkin` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-zipkin.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-zipkin) | +| [Prometheus Exporter](./exporters/prometheus) | Prometheus metric exporter | `opentelemetry-exporter-prometheus` | 1.48.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-prometheus.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-prometheus) | +| [Exporter Common](./exporters/common) | Shared exporter components (internal) | `opentelemetry-exporter-common` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-common.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-common) | +| [OkHttp Sender](./exporters/sender/okhttp) | OkHttp implementation of HttpSender (internal) | `opentelemetry-exporter-sender-okhttp` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-sender-okhttp.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-sender-okhttp) | +| [JDK Sender](./exporters/sender/jdk) | Java 11+ native HttpClient implementation of HttpSender (internal) | `opentelemetry-exporter-sender-jdk` | 1.48.0 | 
[![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-sender-jdk.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-sender-jdk) |
+| [gRPC ManagedChannel Sender](./exporters/sender/grpc-managed-channel) | gRPC ManagedChannel implementation of GrpcSender (internal) | `opentelemetry-exporter-sender-grpc-managed-channel` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-sender-grpc-managed-channel.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-sender-grpc-managed-channel) |
+</details>
+ +
+ SDK Extensions + +Extensions to the OpenTelemetry SDK. -```xml - - - - - io.opentelemetry - opentelemetry-bom - 1.29.0 - pom - import - - - - - - io.opentelemetry - opentelemetry-api - - - -``` +| Component | Description | Artifact ID | Version | Javadoc | +|-------------------------------------------------------------------------------|------------------------------------------------------------------------------------|-----------------------------------------------------|-------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [SDK Autoconfigure](./sdk-extensions/autoconfigure) | Autoconfigure OpenTelemetry SDK from env vars, system properties, and SPI | `opentelemetry-sdk-extension-autoconfigure` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-extension-autoconfigure.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-extension-autoconfigure) | +| [SDK Autoconfigure SPI](./sdk-extensions/autoconfigure-spi) | Service Provider Interface (SPI) definitions for autoconfigure | `opentelemetry-sdk-extension-autoconfigure-spi` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-extension-autoconfigure-spi.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-extension-autoconfigure-spi) | +| [SDK Jaeger Remote Sampler Extension](./sdk-extensions/jaeger-remote-sampler) | Sampler which obtains sampling configuration from remote Jaeger server | `opentelemetry-sdk-extension-jaeger-remote-sampler` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-extension-jaeger-remote-sampler.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-extension-jaeger-remote-sampler) | +| [SDK Incubator](./sdk-extensions/incubator) | SDK incubator, including YAML based view configuration, LeakDetectingSpanProcessor | `opentelemetry-sdk-extension-incubator` | 1.48.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-extension-incubator.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-extension-incubator) | +
-### Gradle +
+ Shims -```groovy -dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.29.0") - implementation('io.opentelemetry:opentelemetry-api') -} -``` +Shims for bridging data from one observability library to another. -Note that if you want to use any artifacts that have not fully stabilized yet (such as the [semantic conventions constants](https://github.com/open-telemetry/opentelemetry-java/tree/main/semconv), then you will need to add an entry for the Alpha BOM as well, e.g. +| Component | Description | Artifact ID | Version | Javadoc | +|----------------------------------------|--------------------------------------------------------------|----------------------------------|-------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [OpenCensus Shim](./opencensus-shim) | Bridge opencensus metrics into the OpenTelemetry metrics SDK | `opentelemetry-opencensus-shim` | 1.48.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-opencensus-shim.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-opencensus-shim) | +| [OpenTracing Shim](./opentracing-shim) | Bridge opentracing spans into the OpenTelemetry trace API | `opentelemetry-opentracing-shim` | 1.48.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-opentracing-shim.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-opentracing-shim) | +
-```groovy -dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.29.0") - implementation platform('io.opentelemetry:opentelemetry-bom-alpha:1.29.0-alpha') +## Dependencies - implementation('io.opentelemetry:opentelemetry-api') - implementation('io.opentelemetry:opentelemetry-semconv') - implementation('io.opentelemetry:opentelemetry-sdk-extension-autoconfigure') -} -``` +To take a dependency, [include a BOM][dependencies-and-boms] and specify the dependency as follows, +replacing `{{artifact-id}}` with the value from the "Artifact ID" column +from [artifacts](#artifacts): -## Snapshots +
+ Gradle -Snapshots based out the `main` branch are available for `opentelemetry-api`, `opentelemetry-sdk` and the rest of the artifacts. -We strongly recommend using our published BOM to keep all dependency versions in sync. +```groovy + implementation('io.opentelemetry:{{artifact-id}}') +``` +
-### Maven +
+ Maven ```xml - - - - oss.sonatype.org-snapshot - https://oss.sonatype.org/content/repositories/snapshots - - - - - - io.opentelemetry - opentelemetry-bom - 1.30.0-SNAPSHOT - pom - import - - - - - - io.opentelemetry - opentelemetry-api - - - + + io.opentelemetry + {{artifact-id}} + ``` +
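For example, a concrete Gradle setup that pins artifact versions through the stable BOM might look like the following sketch (the `1.48.0` version is illustrative, taken from the artifact tables above):

```groovy
dependencies {
  // The BOM supplies versions, so individual artifacts omit them.
  implementation platform("io.opentelemetry:opentelemetry-bom:1.48.0")
  implementation('io.opentelemetry:opentelemetry-api')
  implementation('io.opentelemetry:opentelemetry-sdk')
}
```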
+ +### Snapshots + +Snapshots of the `main` branch are available as follows: -### Gradle +
+ Gradle ```groovy repositories { @@ -171,152 +180,118 @@ repositories { } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.30.0-SNAPSHOT") + implementation platform("io.opentelemetry:opentelemetry-bom:1.49.0-SNAPSHOT") implementation('io.opentelemetry:opentelemetry-api') } ``` +
-Libraries will usually only need `opentelemetry-api`, while applications -will want to use the `opentelemetry-sdk` module which contains our standard implementation -of the APIs. - -## Gradle composite builds - -For opentelemetry-java developers that need to test the latest source code with another -project, composite builds can be used as an alternative to `publishToMavenLocal`. This -requires some setup which is explained [here](CONTRIBUTING.md#composing-builds). - -## Releases - -See the [VERSIONING.md](VERSIONING.md) document for our policies for releases and compatibility -guarantees. - -Check out information about the [latest release](https://github.com/open-telemetry/opentelemetry-java/releases). - -See the project [milestones](https://github.com/open-telemetry/opentelemetry-java/milestones) -for details on upcoming releases. The dates and features described in issues -and milestones are estimates, and subject to change. - -The following tables describe the artifacts published by this project. To take a dependency, follow -the instructions in [Published Released](#published-releases) to include the BOM, and specify the -dependency as follows, replacing `{{artifact-id}}` with the value from the "Artifact ID" column: +
+ Maven ```xml - - io.opentelemetry - {{artifact-id}} - -``` - -```groovy - implementation('io.opentelemetry:{{artifact-id}}') + + + + oss.sonatype.org-snapshot + https://oss.sonatype.org/content/repositories/snapshots + + + + + + io.opentelemetry + opentelemetry-bom + 1.49.0-SNAPSHOT + pom + import + + + + + + io.opentelemetry + opentelemetry-api + + + ``` +
-### Bill of Material +## Compatibility -| Component | Description | Artifact ID | Version | Javadoc | -|----------------------------------------------|----------------------------------------|---------------------------|-------------------------------------------------------------|---------| -| [Bill of Materials (BOM)](./bom) | Bill of materials for stable artifacts | `opentelemetry-bom` | 1.29.0 | N/A | -| [Alpha Bill of Materials (BOM)](./bom-alpha) | Bill of materials for alpha artifacts | `opentelemetry-bom-alpha` | 1.29.0-alpha | N/A | +Artifacts from this repository follow semantic versioning. -### API +Stable artifacts (i.e. artifacts without `-alpha` version suffix) come with strong backwards +compatibility guarantees for public APIs. +Artifacts may depend on other artifacts from this repository, and may depend on internal APIs (i.e. +non-public APIs) which are subject to change across minor versions. Therefore, it's critical to keep +artifact versions in sync in order to avoid possible runtime exceptions. We strongly +recommend [using one of our BOMs][dependencies-and-boms] to assist in keeping artifacts in sync. -| Component | Description | Artifact ID | Version | Javadoc | -| --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------- | ----------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [API](./api/all) | OpenTelemetry API, including metrics, traces, baggage, context | `opentelemetry-api` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-api.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-api) | -| [Events API](./api/events) | OpenTelemetry Event API for emitting events. | `opentelemetry-api-events` | 1.29.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-api-events.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-api-events) | -| [Context API](./context) | OpenTelemetry context API | `opentelemetry-context` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-context.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-context) | -| [Semantic Conventions](./semconv) | Generated code for OpenTelemetry semantic conventions (deprecated, moved to [open-telemetry/semantic-conventions-java](https://github.com/open-telemetry/semantic-conventions-java)) | `opentelemetry-semconv` | 1.29.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-semconv.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-semconv) | +See the [VERSIONING.md](VERSIONING.md) for complete details on compatibility policy. 
-### API Extensions - -| Component | Description | Artifact ID | Version | Javadoc | -|---------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------|-------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [Kotlin Extension](./extensions/kotlin) | Context extension for coroutines | `opentelemetry-extension-kotlin` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-extension-kotlin.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-extension-kotlin) | -| [Trace Propagators Extension](./extensions/trace-propagators) | Trace propagators, including B3, Jaeger, OT Trace | `opentelemetry-extension-trace-propagators` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-extension-trace-propagators.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-extension-trace-propagators) | -| [Incubator Extension](./extensions/incubator) | API incubator, including pass through propagator, and extended tracer | `opentelemetry-extension-incubator` | 1.29.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-extension-incubator.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-extension-incubator) | - -### SDK +## Contacting us -| Component | Description | Artifact ID | Version | Javadoc | -|------------------------------|--------------------------------------------------------|-----------------------------|---------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [SDK](./sdk/all) | OpenTelemetry SDK, including metrics, traces, and logs | `opentelemetry-sdk` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk) | -| [Metrics SDK](./sdk/metrics) | OpenTelemetry metrics SDK | `opentelemetry-sdk-metrics` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-metrics.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-metrics) | -| [Trace SDK](./sdk/trace) | OpenTelemetry trace SDK | `opentelemetry-sdk-trace` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-trace.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-trace) | -| [Log SDK](./sdk/logs) | OpenTelemetry log SDK | `opentelemetry-sdk-logs` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-logs.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-logs) | -| [SDK Common](./sdk/common) | Shared SDK components | `opentelemetry-sdk-common` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-common.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-common) | -| [SDK Testing](./sdk/testing) | Components for testing OpenTelemetry instrumentation | `opentelemetry-sdk-testing` | 1.29.0 | 
[![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-testing.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-testing) | +We hold regular meetings. See details at [community page](https://github.com/open-telemetry/community#java-sdk). -### SDK Exporters +To report a bug, or request a new feature, +please [open an issue](https://github.com/open-telemetry/opentelemetry-java/issues/new/choose). -| Component | Description | Artifact ID | Version | Javadoc | -|-----------------------------------------------------------------------|------------------------------------------------------------------------------|------------------------------------------------------|-------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [OTLP Exporters](./exporters/otlp/all) | OTLP gRPC & HTTP exporters, including traces, metrics, and logs | `opentelemetry-exporter-otlp` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-otlp.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-otlp) | -| [OTLP Logging Exporters](./exporters/logging-otlp) | Logging exporters in OTLP JSON encoding, including traces, metrics, and logs | `opentelemetry-exporter-logging-otlp` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-logging-otlp.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-logging-otlp) | -| [OTLP Common](./exporters/otlp/common) | Shared OTLP components (internal) | `opentelemetry-exporter-otlp-common` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-otlp-common.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-otlp-common) | -| [Jaeger gRPC Exporter](./exporters/jaeger) | Jaeger gRPC trace exporter (deprecated [1]) | `opentelemetry-exporter-jaeger` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-jaeger.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-jaeger) | -| [Jaeger Thrift Exporter](./exporters/jaeger-thrift) | Jaeger thrift trace exporter (deprecated [1]) | `opentelemetry-exporter-jaeger-thift` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-jaeger-thrift.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-jaeger-thrift) | -| [Logging Exporter](./exporters/logging) | Logging exporters, including metrics, traces, and logs | `opentelemetry-exporter-logging` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-logging.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-logging) | -| [Zipkin Exporter](./exporters/zipkin) | Zipkin trace exporter | `opentelemetry-exporter-zipkin` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-zipkin.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-zipkin) | -| [Prometheus Exporter](./exporters/prometheus) | Prometheus metric exporter | `opentelemetry-exporter-prometheus` | 1.29.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-prometheus.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-prometheus) | -| 
[Exporter Common](./exporters/common) | Shared exporter components (internal) | `opentelemetry-exporter-common` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-common.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-common) | -| [OkHttp Sender](./exporters/sender/okhttp) | OkHttp implementation of HttpSender (internal) | `opentelemetry-exporter-sender-okhttp` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-sender-okhttp.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-sender-okhttp) | -| [JDK Sender](./exporters/sender/okhttp) | Java 11+ native HttpClient implementation of HttpSender (internal) | `opentelemetry-exporter-sender-jdk` | 1.29.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-sender-jdk.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-sender-jdk) | | -| [gRPC ManagedChannel Sender](./exporters/sender/grpc-managed-channel) | gRPC ManagedChannel implementation of GrpcSender (internal) | `opentelemetry-exporter-sender-grpc-managed-channel` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-exporter-sender-grpc-managed-channel.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-exporter-sender-grpc-managed-channel) | | - -**[1]**: Jaeger now -has [native support for OTLP](https://opentelemetry.io/blog/2022/jaeger-native-otlp/) and jaeger -exporters are now deprecated. `opentelemetry-exporter-jaeger-thrift` will continue to be published -until 1.34.0 (January 2024) but no new PRs will be accepted except security related bugfixes. After -1.34.0, `io.opentelemetry:opentelemetry-bom` will reference the last published version, but no -additional versions will be published. - -### SDK Extensions +We use [GitHub Discussions](https://github.com/open-telemetry/opentelemetry-java/discussions) +for support or general questions. Feel free to drop us a line. 
-| Component | Description | Artifact ID | Version | Javadoc | -|-------------------------------------------------------------------------------|------------------------------------------------------------------------------------|-----------------------------------------------------|-------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [SDK Autoconfigure](./sdk-extensions/autoconfigure) | Autoconfigure OpenTelemetry SDK from env vars, system properties, and SPI | `opentelemetry-sdk-extension-autoconfigure` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-extension-autoconfigure.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-extension-autoconfigure) | -| [SDK Autoconfigure SPI](./sdk-extensions/autoconfigure-spi) | Service Provider Interface (SPI) definitions for autoconfigure | `opentelemetry-sdk-extension-autoconfigure-spi` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-extension-autoconfigure-spi.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-extension-autoconfigure-spi) | -| [SDK Jaeger Remote Sampler Extension](./sdk-extensions/jaeger-remote-sampler) | Sampler which obtains sampling configuration from remote Jaeger server | `opentelemetry-sdk-extension-jaeger-remote-sampler` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-extension-jaeger-remote-sampler.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-extension-jaeger-remote-sampler) | -| [SDK Incubator](./sdk-extensions/incubator) | SDK incubator, including YAML based view configuration, LeakDetectingSpanProcessor | `opentelemetry-sdk-extension-incubator` | 1.29.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-extension-incubator.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-extension-incubator) | +We are also present in the [`#otel-java`](https://cloud-native.slack.com/archives/C014L2KCTE3) channel in the [CNCF slack](https://slack.cncf.io/). +Please join us for more informal discussions. 
-### Shims +## Contributing -| Component | Description | Artifact ID | Version | Javadoc | -|----------------------------------------|--------------------------------------------------------------|----------------------------------|-------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [OpenCensus Shim](./opencensus-shim) | Bridge opencensus metrics into the OpenTelemetry metrics SDK | `opentelemetry-opencensus-shim` | 1.29.0-alpha | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-opencensus-shim.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-opencensus-shim) | -| [OpenTracing Shim](./opentracing-shim) | Bridge opentracing spans into the OpenTelemetry trace API | `opentelemetry-opentracing-shim` | 1.29.0 | [![Javadocs](https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-opentracing-shim.svg)](https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-opentracing-shim) | +See [CONTRIBUTING.md](CONTRIBUTING.md) for: -## Contributing +* Details on building locally +* Project scope +* Keys to successful PRs +* Guide to using gradle composite builds -See [CONTRIBUTING.md](CONTRIBUTING.md) +### Code owners Triagers: - [Gregor Zeitlinger](https://github.com/zeitlinger), Grafana Labs -*Find more about the triager role in [community repository](https://github.com/open-telemetry/community/blob/main/community-membership.md#triager).* +*Find more about the triager role in [community repository](https://github.com/open-telemetry/community/blob/main/guides/contributor/membership.md#triager).* Approvers ([@open-telemetry/java-approvers](https://github.com/orgs/open-telemetry/teams/java-approvers)): +- [Jason Plumb](https://github.com/breedx-splk), Splunk - [Josh Suereth](https://github.com/jsuereth), Google -- [Mateusz Rzeszutek](https://github.com/mateuszrzeszutek), Splunk +- [Lauri Tulmin](https://github.com/laurit), Splunk - [Trask Stalnaker](https://github.com/trask), Microsoft -*Find more about the approver role in [community repository](https://github.com/open-telemetry/community/blob/master/community-membership.md#approver).* +*Find more about the approver role in [community repository](https://github.com/open-telemetry/community/blob/main/guides/contributor/membership.md#approver).* Maintainers ([@open-telemetry/java-maintainers](https://github.com/orgs/open-telemetry/teams/java-maintainers)): - [Jack Berg](https://github.com/jack-berg), New Relic - [John Watson](https://github.com/jkwatson), Verta.ai -Maintainers Emeritus: +Emeritus: + +- Maintainer [Bogdan Drutu](https://github.com/BogdanDrutu) +- Maintainer [Carlos Alberto](https://github.com/carlosalberto) +- Approver [Mateusz Rzeszutek](https://github.com/mateuszrzeszutek) + +*Find more about the maintainer role in [community repository](https://github.com/open-telemetry/community/blob/main/guides/contributor/membership.md#maintainer).* + +### Help wanted -- [Bogdan Drutu](https://github.com/BogdanDrutu), Splunk -- [Carlos Alberto](https://github.com/carlosalberto), LightStep +We are currently resource constrained and are actively seeking new contributors interested in working towards [approver](https://github.com/open-telemetry/community/blob/main/guides/contributor/membership.md#approver) / [maintainer](https://github.com/open-telemetry/community/blob/main/guides/contributor/membership.md#maintainer) roles. 
In addition to the documentation for approver / maintainer roles and the [contributing](./CONTRIBUTING.md) guide, here are some additional notes on engaging: -*Find more about the maintainer role in [community repository](https://github.com/open-telemetry/community/blob/master/community-membership.md#maintainer).* +- [Pull request](https://github.com/open-telemetry/opentelemetry-java/pulls) reviews are equally or more helpful than code contributions. Comments and approvals are valuable with or without a formal project role. They're also a great forcing function to explore a fairly complex codebase. +- Attending the [Java: SDK + Automatic Instrumentation](https://github.com/open-telemetry/community?tab=readme-ov-file#implementation-sigs) Special Interest Group (SIG) is a great way to get to know community members and learn about project priorities. +- Issues labeled [help wanted](https://github.com/open-telemetry/opentelemetry-java/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) are project priorities. Code contributions (or pull request reviews when a PR is linked) for these issues are particularly important. +- Triaging / responding to new issues and discussions is a great way to engage with the project. ### Thanks to all the people who have contributed @@ -330,10 +305,6 @@ Made with [contrib.rocks](https://contrib.rocks). [ci-url]: https://github.com/open-telemetry/opentelemetry-java/actions?query=workflow%3ABuild+branch%3Amain [codecov-image]: https://codecov.io/gh/open-telemetry/opentelemetry-java/branch/main/graph/badge.svg [codecov-url]: https://app.codecov.io/gh/open-telemetry/opentelemetry-java/branch/main/ -[Manual instrumentation]: https://opentelemetry.io/docs/java/manual_instrumentation/ +[dependencies-and-boms]: https://opentelemetry.io/docs/languages/java/intro/#dependencies-and-boms [maven-image]: https://maven-badges.herokuapp.com/maven-central/io.opentelemetry/opentelemetry-api/badge.svg [maven-url]: https://maven-badges.herokuapp.com/maven-central/io.opentelemetry/opentelemetry-api -[opentelemetry-java-instrumentation]: https://github.com/open-telemetry/opentelemetry-java-instrumentation -[opentelemetry-java-docs]: https://github.com/open-telemetry/opentelemetry-java-docs -[opentelemetry.io]: https://opentelemetry.io -[otel-java-status]: https://opentelemetry.io/docs/instrumentation/java/#status-and-releases diff --git a/RELEASING.md b/RELEASING.md index f0a2f7da67f..a6e7a29b68e 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -59,6 +59,9 @@ and deadlocks. in which case no pull request will be created). * The [website](https://github.com/open-telemetry/opentelemetry.io) contains automation to update to the newly released version. Review and approve the pull request when available. + * The [website](https://opentelemetry.io/docs/languages/java/configuration/#zero-code-sdk-autoconfigure) + contains documentation on autoconfiguration properties. If the release has updated or modified any + properties, open and merge a pull request to update the documentation. ## Update release versions in documentations @@ -77,14 +80,14 @@ Create a PR against the main branch with the changes. ## Credentials -The following credentials are required for publishing (and automatically set in Github Actions): +The following credentials are required for building or publishing (and automatically set in Github Actions): * `GPG_PRIVATE_KEY` and `GPG_PASSWORD`: GPG private key and password for signing. * `SONATYPE_USER` and `SONATYPE_KEY`: Sonatype username and password. 
* Each maintainer will have their own set of Sonatype credentials with permission to publish to the `io.opentelemetry` group prefix. - * Request [publishing permissions](https://central.sonatype.org/publish/manage-permissions/) by - commenting on [OSSRH-63768](https://issues.sonatype.org/browse/OSSRH-63768) with confirmation + * [Register to publish](https://central.sonatype.org/register/central-portal/#and-publishing-is-easy) + and comment on [OSSRH-63768](https://issues.sonatype.org/browse/OSSRH-63768) with confirmation from another maintainer. * To obtain `SONATYPE_USER` and `SONATYPE_KEY` for your account, log in to [oss.sonatype.org](https://oss.sonatype.org/) and navigate to Profile -> User Token -> Access diff --git a/VERSIONING.md b/VERSIONING.md index 9d2ad3af3cf..c54306a711c 100644 --- a/VERSIONING.md +++ b/VERSIONING.md @@ -18,7 +18,7 @@ changes are: reordering parameters, adding a method to an interface or abstract class without adding a default implementation. -- ABI changes that could require code using the artifact to be recompiled, but not changed, e.g., +- [ABI](https://wikipedia.org/wiki/Application_binary_interface) changes that could require code using the artifact to be recompiled, but not changed, e.g., changing the return type of a method from `void` to non-`void`, changing a `class` to an `interface`. The [JLS](https://docs.oracle.com/javase/specs/jls/se7/html/jls-13.html) has more information on what constitutes compatible changes. @@ -53,14 +53,28 @@ new artifact which requires adding the new artifact to dependency declarations. On rare occasions we may deprecate an entire stable artifact, with the intent of stopping functional changes or enhancements. In these situations we may stop publishing additional `MINOR` or `MAJOR` versions of the artifact. However, if necessary, we'll publish security fixes via `PATCH` releases. -Despite stopping publishing, new versions of the BOM will continue to reference the last published -version of the artifact, and the API of the last published version will remain stable. +The API of the last published version will remain stable. As a user, if you always depend on the latest version of the BOM for a given `MAJOR` version, and you do not use classes in the `internal` package (which you MUST NOT do), you can be assured that your app will always function and have access to the latest features of OpenTelemetry without needing any changes to code. +## Language Version Compatibility + +The artifacts published by this codebase are compatible with certain language levels of tooling in +the Java ecosystem. For example, all artifacts (except where otherwise noted) support Java language +level 8 or higher, and the many artifacts intended to be used in Android environments adhere to a +particular [Android API level](https://developer.android.com/tools/releases/build-tools). The +following table defines the minimum language levels we adhere to, and how each is considered with +respect to semantic versioning.
+ +| Language | Minimum Version | Applicability | Semconv Notes | +|----------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Java | 8+ | All artifacts, unless otherwise noted | Changing requires major version bump. | +| Android | 23+ (NOTE: [desugaring](https://developer.android.com/studio/write/java8-support#library-desugaring) is required. We stay up to date with the latest version of [desugar_jdk_libs](https://github.com/google/desugar_jdk_libs).) | Artifacts using `otel.animalsniffer-conventions` plugin | Kept in sync with minimum requirements for [Google Play services](https://developers.google.com/android/guides/setup). Subject to change in minor version. | +| Kotlin | 1.6+ | Only applies to `opentelemetry-extension-kotlin` | Kept in sync with [minimum non-deprecated](https://kotlinlang.org/docs/gradle-compiler-options.html#attributes-common-to-jvm-and-js) version. Subject to change in minor versions. | + ## API vs SDK This codebase is broadly split into two large pieces, the OpenTelemetry API and the OpenTelemetry SDK, diff --git a/all/build.gradle.kts b/all/build.gradle.kts index e659d58d1a8..2cd16981fbb 100644 --- a/all/build.gradle.kts +++ b/all/build.gradle.kts @@ -23,6 +23,11 @@ tasks { } } +// Skip OWASP dependencyCheck task on test module +dependencyCheck { + skip = true +} + val testTasks = mutableListOf() dependencies { @@ -40,19 +45,6 @@ dependencies { } } - // For testing BOM references to artifacts that were previously published - testImplementation(platform(project(":bom"))) - // The io.grpc.grpc-* dependencies are transitive dependencies of opentelemetry-exporter-jaeger-proto - // which must be provided by the user - testImplementation("io.opentelemetry:opentelemetry-exporter-jaeger-proto") - testImplementation("io.grpc:grpc-api") - testImplementation("io.grpc:grpc-protobuf") - testImplementation("io.grpc:grpc-stub") - testImplementation("io.opentelemetry:opentelemetry-extension-annotations") - testImplementation("io.opentelemetry:opentelemetry-extension-aws") - testImplementation("io.opentelemetry:opentelemetry-sdk-extension-resources") - testImplementation("io.opentelemetry:opentelemetry-sdk-extension-aws") - testImplementation("com.tngtech.archunit:archunit-junit5") } @@ -93,11 +85,7 @@ tasks.named("jacocoTestReport") { // Exclude mrjar (jacoco complains), shaded, and generated code !it.absolutePath.contains("META-INF/versions/") && !it.absolutePath.contains("/internal/shaded/") && - !it.absolutePath.contains("io/opentelemetry/proto/") && - !it.absolutePath.contains("io/opentelemetry/exporter/jaeger/proto/") && - !it.absolutePath.contains("io/opentelemetry/exporter/jaeger/internal/protobuf/") && !it.absolutePath.contains("io/opentelemetry/sdk/extension/trace/jaeger/proto/") && - !it.absolutePath.contains("io/opentelemetry/semconv/trace/attributes/") && !it.absolutePath.contains("AutoValue_") }, ) diff --git a/all/src/test/java/io/opentelemetry/all/FallbackArtifactsTest.java b/all/src/test/java/io/opentelemetry/all/FallbackArtifactsTest.java deleted file mode 100644 index eebe961e292..00000000000 --- 
a/all/src/test/java/io/opentelemetry/all/FallbackArtifactsTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.all; - -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * This test asserts that artifacts which are no longer published continue to be referenced in - * {@code opentelemetry-bom}. - */ -class FallbackArtifactsTest { - - @Test - void exporterJaegerProto() { - classAvailable("io.opentelemetry.exporter.jaeger.proto.api_v2.Collector"); - classAvailable("io.opentelemetry.exporter.jaeger.proto.api_v2.CollectorServiceGrpc"); - classAvailable("io.opentelemetry.exporter.jaeger.proto.api_v2.Model"); - } - - @Test - void extensionAnnotations() { - classAvailable("io.opentelemetry.extension.annotations.WithSpan"); - classAvailable("io.opentelemetry.extension.annotations.SpanAttribute"); - } - - @Test - void sdkExtensionResources() { - classAvailable("io.opentelemetry.sdk.extension.resources.ContainerResource"); - classAvailable("io.opentelemetry.sdk.extension.resources.ContainerResourceProvider"); - classAvailable("io.opentelemetry.sdk.extension.resources.HostResource"); - classAvailable("io.opentelemetry.sdk.extension.resources.HostResourceProvider"); - classAvailable("io.opentelemetry.sdk.extension.resources.OsResource"); - classAvailable("io.opentelemetry.sdk.extension.resources.OsResourceProvider"); - classAvailable("io.opentelemetry.sdk.extension.resources.ProcessResource"); - classAvailable("io.opentelemetry.sdk.extension.resources.ProcessResourceProvider"); - classAvailable("io.opentelemetry.sdk.extension.resources.ProcessRuntimeResource"); - classAvailable("io.opentelemetry.sdk.extension.resources.ProcessRuntimeResourceProvider"); - } - - @Test - void sdkExtensionAws() { - classAvailable("io.opentelemetry.sdk.extension.aws.resource.BeanstalkResource"); - classAvailable("io.opentelemetry.sdk.extension.aws.resource.BeanstalkResourceProvider"); - classAvailable("io.opentelemetry.sdk.extension.aws.resource.Ec2Resource"); - classAvailable("io.opentelemetry.sdk.extension.aws.resource.Ec2ResourceProvider"); - classAvailable("io.opentelemetry.sdk.extension.aws.resource.EcsResource"); - classAvailable("io.opentelemetry.sdk.extension.aws.resource.EcsResourceProvider"); - classAvailable("io.opentelemetry.sdk.extension.aws.resource.EksResource"); - classAvailable("io.opentelemetry.sdk.extension.aws.resource.EksResourceProvider"); - classAvailable("io.opentelemetry.sdk.extension.aws.resource.LambdaResource"); - classAvailable("io.opentelemetry.sdk.extension.aws.resource.LambdaResourceProvider"); - classAvailable("io.opentelemetry.sdk.extension.aws.trace.AwsXrayIdGenerator"); - } - - @Test - void extensionAws() { - classAvailable("io.opentelemetry.extension.aws.AwsConfigurablePropagator"); - classAvailable("io.opentelemetry.extension.aws.AwsXrayPropagator"); - } - - private static void classAvailable(String fqcn) { - Assertions.assertThatCode(() -> Class.forName(fqcn)).doesNotThrowAnyException(); - } -} diff --git a/animal-sniffer-signature/build.gradle.kts b/animal-sniffer-signature/build.gradle.kts new file mode 100644 index 00000000000..f9e67347a07 --- /dev/null +++ b/animal-sniffer-signature/build.gradle.kts @@ -0,0 +1,52 @@ +import ru.vyarus.gradle.plugin.animalsniffer.info.SignatureInfoTask +import ru.vyarus.gradle.plugin.animalsniffer.signature.BuildSignatureTask + +plugins { + id("otel.java-conventions") + id("ru.vyarus.animalsniffer") +} + 
+description = "Build tool to generate the Animal Sniffer Android signature" +otelJava.moduleName.set("io.opentelemetry.internal.animalsniffer") + +val signatureJar = configurations.create("signatureJar") { + isCanBeConsumed = false + isCanBeResolved = false +} +val signatureJarClasspath = configurations.create("signatureJarClasspath") { + isCanBeConsumed = false + isCanBeResolved = true + extendsFrom(signatureJar) +} +val generatedSignature = configurations.create("generatedSignature") { + isCanBeConsumed = true + isCanBeResolved = false +} +configurations.add(signatureJar) +configurations.add(signatureJarClasspath) +configurations.add(generatedSignature) + +dependencies { + signature("com.toasttab.android:gummy-bears-api-23:0.12.0@signature") + signatureJar("com.android.tools:desugar_jdk_libs") +} + +val signatureSimpleName = "android.signature" +val signatureBuilderTask = tasks.register("buildSignature", BuildSignatureTask::class.java) { + files(signatureJarClasspath) // All the jar files here will be added to the signature file. + signatures(configurations.signature) // We'll extend from the existing signatures added to this config. + outputName = signatureSimpleName // Name for the generated signature file. +} + +// Exposing the "generatedSignature" consumable config to be used in other subprojects +artifacts { + add("generatedSignature", project.provider { File(signatureBuilderTask.get().outputs.files.singleFile, signatureSimpleName) }) { + builtBy(signatureBuilderTask) + } +} + +// Utility task to show what's in the signature file +tasks.register("printSignature", SignatureInfoTask::class.java) { + signature = signatureBuilderTask.get().outputFiles + depth = 1 +} diff --git a/api/all/build.gradle.kts b/api/all/build.gradle.kts index 4998fd9ddd3..ad6896387d8 100644 --- a/api/all/build.gradle.kts +++ b/api/all/build.gradle.kts @@ -15,6 +15,8 @@ dependencies { annotationProcessor("com.google.auto.value:auto-value") + testImplementation(project(":api:testing-internal")) + testImplementation("edu.berkeley.cs.jqf:jqf-fuzz") testImplementation("com.google.guava:guava-testlib") } diff --git a/api/all/src/main/java/io/opentelemetry/api/GlobalOpenTelemetry.java b/api/all/src/main/java/io/opentelemetry/api/GlobalOpenTelemetry.java index 3e2244a95bb..230a85e06a9 100644 --- a/api/all/src/main/java/io/opentelemetry/api/GlobalOpenTelemetry.java +++ b/api/all/src/main/java/io/opentelemetry/api/GlobalOpenTelemetry.java @@ -53,8 +53,11 @@ public final class GlobalOpenTelemetry { private static final Object mutex = new Object(); - @Nullable private static volatile ObfuscatedOpenTelemetry globalOpenTelemetry; + @SuppressWarnings("NonFinalStaticField") + @Nullable + private static volatile ObfuscatedOpenTelemetry globalOpenTelemetry; + @SuppressWarnings("NonFinalStaticField") @GuardedBy("mutex") @Nullable private static Throwable setGlobalCaller; diff --git a/api/all/src/main/java/io/opentelemetry/api/baggage/Baggage.java b/api/all/src/main/java/io/opentelemetry/api/baggage/Baggage.java index 5dd2924d432..12b81d7b033 100644 --- a/api/all/src/main/java/io/opentelemetry/api/baggage/Baggage.java +++ b/api/all/src/main/java/io/opentelemetry/api/baggage/Baggage.java @@ -99,4 +99,23 @@ default boolean isEmpty() { * be set to not use an implicit parent, so any parent assignment must be done manually. */ BaggageBuilder toBuilder(); + + /** + * Returns the {@code BaggageEntry} associated with the given key. 
+ * + * @param entryKey entry key to return the {@code BaggageEntry} for, or {@code null} if no {@code + * Entry} with the given {@code entryKey} is in this {@code Baggage}. + * @since 1.43.0 + */ + @Nullable + default BaggageEntry getEntry(String entryKey) { + BaggageEntry[] result = new BaggageEntry[] {null}; + forEach( + (key, entry) -> { + if (entryKey.equals(key)) { + result[0] = entry; + } + }); + return result[0]; + } } diff --git a/api/all/src/main/java/io/opentelemetry/api/baggage/ImmutableBaggage.java b/api/all/src/main/java/io/opentelemetry/api/baggage/ImmutableBaggage.java index 7faf4152fe7..d628e89b282 100644 --- a/api/all/src/main/java/io/opentelemetry/api/baggage/ImmutableBaggage.java +++ b/api/all/src/main/java/io/opentelemetry/api/baggage/ImmutableBaggage.java @@ -37,6 +37,13 @@ public String getEntryValue(String entryKey) { return entry != null ? entry.getValue() : null; } + // Overrides the default implementation to provide a more performant implementation. + @Nullable + @Override + public BaggageEntry getEntry(String entryKey) { + return get(entryKey); + } + @Override public BaggageBuilder toBuilder() { return new Builder(new ArrayList<>(data())); diff --git a/api/all/src/main/java/io/opentelemetry/api/baggage/package-info.java b/api/all/src/main/java/io/opentelemetry/api/baggage/package-info.java index 7cf942a9129..d119efa9a03 100644 --- a/api/all/src/main/java/io/opentelemetry/api/baggage/package-info.java +++ b/api/all/src/main/java/io/opentelemetry/api/baggage/package-info.java @@ -13,7 +13,6 @@ *

Note that entries are independent of the tracing data that is propagated in the {@link * io.opentelemetry.context.Context}, such as trace ID. */ -// TODO: Add code examples. @ParametersAreNonnullByDefault package io.opentelemetry.api.baggage; diff --git a/api/all/src/main/java/io/opentelemetry/api/baggage/propagation/Parser.java b/api/all/src/main/java/io/opentelemetry/api/baggage/propagation/Parser.java index d6c26fcef27..a616e8e24d2 100644 --- a/api/all/src/main/java/io/opentelemetry/api/baggage/propagation/Parser.java +++ b/api/all/src/main/java/io/opentelemetry/api/baggage/propagation/Parser.java @@ -12,11 +12,11 @@ /** * Implements single-pass Baggage parsing in accordance with https://w3c.github.io/baggage/ Key / - * value are restricted in accordance with https://www.ietf.org/rfc/rfc2616.txt + * value are restricted in accordance with https://www.ietf.org/rfc/rfc2616.txt. * *

Note: following aspects are not specified in RFC: - some invalid elements (key or value) - * parser will include valid ones, disregard invalid - empty "value" is regarded as invalid - meta - - * anything besides element terminator (comma) + * anything besides element terminator (comma). */ class Parser { @@ -37,7 +37,7 @@ private enum State { private boolean skipToNext; - public Parser(String baggageHeader) { + Parser(String baggageHeader) { this.baggageHeader = baggageHeader; reset(0); } diff --git a/api/all/src/main/java/io/opentelemetry/api/baggage/propagation/W3CBaggagePropagator.java b/api/all/src/main/java/io/opentelemetry/api/baggage/propagation/W3CBaggagePropagator.java index 9701214e506..e03f8e9078f 100644 --- a/api/all/src/main/java/io/opentelemetry/api/baggage/propagation/W3CBaggagePropagator.java +++ b/api/all/src/main/java/io/opentelemetry/api/baggage/propagation/W3CBaggagePropagator.java @@ -16,7 +16,9 @@ import io.opentelemetry.context.propagation.TextMapGetter; import io.opentelemetry.context.propagation.TextMapPropagator; import io.opentelemetry.context.propagation.TextMapSetter; +import io.opentelemetry.context.propagation.internal.ExtendedTextMapGetter; import java.util.Collection; +import java.util.Iterator; import java.util.List; import javax.annotation.Nullable; @@ -95,6 +97,14 @@ public Context extract(Context context, @Nullable C carrier, TextMapGetter) getter); + } + return extractSingle(context, carrier, getter); + } + + private static Context extractSingle( + Context context, @Nullable C carrier, TextMapGetter getter) { String baggageHeader = getter.get(carrier, FIELD); if (baggageHeader == null) { return context; @@ -112,6 +122,33 @@ public Context extract(Context context, @Nullable C carrier, TextMapGetter Context extractMulti( + Context context, @Nullable C carrier, ExtendedTextMapGetter getter) { + Iterator baggageHeaders = getter.getAll(carrier, FIELD); + if (baggageHeaders == null) { + return context; + } + + boolean extracted = false; + BaggageBuilder baggageBuilder = Baggage.builder(); + + while (baggageHeaders.hasNext()) { + String header = baggageHeaders.next(); + if (header.isEmpty()) { + continue; + } + + try { + extractEntries(header, baggageBuilder); + extracted = true; + } catch (RuntimeException expected) { + // invalid baggage header, continue + } + } + + return extracted ? context.with(baggageBuilder.build()) : context; + } + private static void extractEntries(String baggageHeader, BaggageBuilder baggageBuilder) { new Parser(baggageHeader).parseInto(baggageBuilder); } @@ -127,7 +164,7 @@ private static boolean baggageIsInvalid(String key, BaggageEntry baggageEntry) { * @return whether the name is valid. */ private static boolean isValidBaggageKey(String name) { - return name != null && !name.isEmpty() && StringUtils.isPrintableString(name); + return name != null && !name.trim().isEmpty() && StringUtils.isPrintableString(name); } /** diff --git a/api/all/src/main/java/io/opentelemetry/api/common/KeyValue.java b/api/all/src/main/java/io/opentelemetry/api/common/KeyValue.java new file mode 100644 index 00000000000..e5286015e91 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/KeyValue.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +/** + * Key-value pair of {@link String} key and {@link Value} value. + * + * @see Value#of(KeyValue...) 
+ * @since 1.42.0 + */ +public interface KeyValue { + + /** Returns a {@link KeyValue} for the given {@code key} and {@code value}. */ + static KeyValue of(String key, Value value) { + return KeyValueImpl.create(key, value); + } + + /** Returns the key. */ + String getKey(); + + /** Returns the value. */ + Value getValue(); +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/KeyValueImpl.java b/api/all/src/main/java/io/opentelemetry/api/common/KeyValueImpl.java new file mode 100644 index 00000000000..1525c3f3c69 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/KeyValueImpl.java @@ -0,0 +1,18 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +import com.google.auto.value.AutoValue; + +@AutoValue +abstract class KeyValueImpl implements KeyValue { + + KeyValueImpl() {} + + static KeyValueImpl create(String key, Value value) { + return new AutoValue_KeyValueImpl(key, value); + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/KeyValueList.java b/api/all/src/main/java/io/opentelemetry/api/common/KeyValueList.java new file mode 100644 index 00000000000..42801205564 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/KeyValueList.java @@ -0,0 +1,75 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +import static java.util.stream.Collectors.joining; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +final class KeyValueList implements Value> { + + private final List value; + + private KeyValueList(List value) { + this.value = value; + } + + static Value> create(KeyValue... 
value) { + Objects.requireNonNull(value, "value must not be null"); + List list = new ArrayList<>(value.length); + list.addAll(Arrays.asList(value)); + return new KeyValueList(Collections.unmodifiableList(list)); + } + + static Value> createFromMap(Map> value) { + Objects.requireNonNull(value, "value must not be null"); + KeyValue[] array = + value.entrySet().stream() + .map(entry -> KeyValue.of(entry.getKey(), entry.getValue())) + .toArray(KeyValue[]::new); + return create(array); + } + + @Override + public ValueType getType() { + return ValueType.KEY_VALUE_LIST; + } + + @Override + public List getValue() { + return value; + } + + @Override + public String asString() { + return value.stream() + .map(item -> item.getKey() + "=" + item.getValue().asString()) + .collect(joining(", ", "[", "]")); + } + + @Override + public String toString() { + return "KeyValueList{" + asString() + "}"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + return (o instanceof Value) && Objects.equals(this.value, ((Value) o).getValue()); + } + + @Override + public int hashCode() { + return value.hashCode(); + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/Value.java b/api/all/src/main/java/io/opentelemetry/api/common/Value.java new file mode 100644 index 00000000000..a29be801e27 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/Value.java @@ -0,0 +1,118 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; + +/** + * Value mirrors the proto AnyValue + * message type, and is used to model any type. + * + *

It can be used to represent: + * + *

    + *
  • Primitive values via {@link #of(long)}, {@link #of(String)}, {@link #of(boolean)}, {@link + * #of(double)}. + *
  • String-keyed maps (i.e. associative arrays, dictionaries) via {@link #of(KeyValue...)}, + * {@link #of(Map)}. Note, because map values are type {@link Value}, maps can be nested + * within other maps. + *
  • Arrays (heterogeneous or homogenous) via {@link #of(Value[])}. Note, because array values + * are type {@link Value}, arrays can contain primitives, complex types like maps or arrays, + * or any combination. + *
  • Raw bytes via {@link #of(byte[])} + *
+ * + *

Currently, Value is only used as an argument for {@link + * io.opentelemetry.api.logs.LogRecordBuilder#setBody(Value)}. + * + * @param the type. See {@link #getValue()} for description of types. + * @since 1.42.0 + */ +public interface Value { + + /** Returns an {@link Value} for the {@link String} value. */ + static Value of(String value) { + return ValueString.create(value); + } + + /** Returns an {@link Value} for the {@code boolean} value. */ + static Value of(boolean value) { + return ValueBoolean.create(value); + } + + /** Returns an {@link Value} for the {@code long} value. */ + static Value of(long value) { + return ValueLong.create(value); + } + + /** Returns an {@link Value} for the {@code double} value. */ + static Value of(double value) { + return ValueDouble.create(value); + } + + /** Returns an {@link Value} for the {@code byte[]} value. */ + static Value of(byte[] value) { + return ValueBytes.create(value); + } + + /** Returns an {@link Value} for the array of {@link Value} values. */ + static Value>> of(Value... value) { + return ValueArray.create(value); + } + + /** Returns an {@link Value} for the list of {@link Value} values. */ + static Value>> of(List> value) { + return ValueArray.create(value); + } + + /** + * Returns an {@link Value} for the array of {@link KeyValue} values. {@link KeyValue#getKey()} + * values should not repeat - duplicates may be dropped. + */ + static Value> of(KeyValue... value) { + return KeyValueList.create(value); + } + + /** Returns an {@link Value} for the {@link Map} of key, {@link Value}. */ + static Value> of(Map> value) { + return KeyValueList.createFromMap(value); + } + + /** Returns the type of this {@link Value}. Useful for building switch statements. */ + ValueType getType(); + + /** + * Returns the value for this {@link Value}. + * + *

The return type varies by {@link #getType()} as described below: + * + *

    + *
  • {@link ValueType#STRING} returns {@link String} + *
  • {@link ValueType#BOOLEAN} returns {@code boolean} + *
  • {@link ValueType#LONG} returns {@code long} + *
  • {@link ValueType#DOUBLE} returns {@code double} + *
  • {@link ValueType#ARRAY} returns {@link List} of {@link Value} + *
  • {@link ValueType#KEY_VALUE_LIST} returns {@link List} of {@link KeyValue} + *
  • {@link ValueType#BYTES} returns read only {@link ByteBuffer}. See {@link + * ByteBuffer#asReadOnlyBuffer()}. + *
+ */ + T getValue(); + + /** + * Return a string encoding of this {@link Value}. This is intended to be a fallback serialized + * representation in case there is no suitable encoding that can utilize {@link #getType()} / + * {@link #getValue()} to serialize specific types. + * + *

WARNING: No guarantees are made about the encoding of this string response. It MAY change in + * a future minor release. If you need a reliable string encoding, write your own serializer. + */ + // TODO(jack-berg): Should this be a JSON encoding? + String asString(); +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/ValueArray.java b/api/all/src/main/java/io/opentelemetry/api/common/ValueArray.java new file mode 100644 index 00000000000..55c9e5f42b7 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/ValueArray.java @@ -0,0 +1,67 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +import static java.util.stream.Collectors.joining; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +final class ValueArray implements Value>> { + + private final List> value; + + private ValueArray(List> value) { + this.value = value; + } + + static Value>> create(Value... value) { + Objects.requireNonNull(value, "value must not be null"); + List> list = new ArrayList<>(value.length); + list.addAll(Arrays.asList(value)); + return new ValueArray(Collections.unmodifiableList(list)); + } + + static Value>> create(List> value) { + return new ValueArray(Collections.unmodifiableList(value)); + } + + @Override + public ValueType getType() { + return ValueType.ARRAY; + } + + @Override + public List> getValue() { + return value; + } + + @Override + public String asString() { + return value.stream().map(Value::asString).collect(joining(", ", "[", "]")); + } + + @Override + public String toString() { + return "ValueArray{" + asString() + "}"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + return (o instanceof Value) && Objects.equals(this.value, ((Value) o).getValue()); + } + + @Override + public int hashCode() { + return value.hashCode(); + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/ValueBoolean.java b/api/all/src/main/java/io/opentelemetry/api/common/ValueBoolean.java new file mode 100644 index 00000000000..a4364d414df --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/ValueBoolean.java @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +import java.util.Objects; + +final class ValueBoolean implements Value { + + private final boolean value; + + private ValueBoolean(boolean value) { + this.value = value; + } + + static Value create(boolean value) { + return new ValueBoolean(value); + } + + @Override + public ValueType getType() { + return ValueType.BOOLEAN; + } + + @Override + public Boolean getValue() { + return value; + } + + @Override + public String asString() { + return String.valueOf(value); + } + + @Override + public String toString() { + return "ValueBoolean{" + asString() + "}"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + return (o instanceof Value) && Objects.equals(this.value, ((Value) o).getValue()); + } + + @Override + public int hashCode() { + return Boolean.hashCode(value); + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/ValueBytes.java b/api/all/src/main/java/io/opentelemetry/api/common/ValueBytes.java new file mode 100644 index 00000000000..8d925cd174d --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/ValueBytes.java @@ -0,0 
+1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Base64; +import java.util.Objects; + +final class ValueBytes implements Value { + + private final byte[] raw; + + private ValueBytes(byte[] value) { + this.raw = value; + } + + static Value create(byte[] value) { + Objects.requireNonNull(value, "value must not be null"); + return new ValueBytes(Arrays.copyOf(value, value.length)); + } + + @Override + public ValueType getType() { + return ValueType.BYTES; + } + + @Override + public ByteBuffer getValue() { + return ByteBuffer.wrap(raw).asReadOnlyBuffer(); + } + + @Override + public String asString() { + return Base64.getEncoder().encodeToString(raw); + } + + @Override + public String toString() { + return "ValueBytes{" + asString() + "}"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + return (o instanceof ValueBytes) && Arrays.equals(this.raw, ((ValueBytes) o).raw); + } + + @Override + public int hashCode() { + return Arrays.hashCode(raw); + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/ValueDouble.java b/api/all/src/main/java/io/opentelemetry/api/common/ValueDouble.java new file mode 100644 index 00000000000..21f13dd7e78 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/ValueDouble.java @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +import java.util.Objects; + +final class ValueDouble implements Value { + + private final double value; + + private ValueDouble(double value) { + this.value = value; + } + + static Value create(double value) { + return new ValueDouble(value); + } + + @Override + public ValueType getType() { + return ValueType.DOUBLE; + } + + @Override + public Double getValue() { + return value; + } + + @Override + public String asString() { + return String.valueOf(value); + } + + @Override + public String toString() { + return "ValueDouble{" + asString() + "}"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + return (o instanceof Value) && Objects.equals(this.value, ((Value) o).getValue()); + } + + @Override + public int hashCode() { + return Double.hashCode(value); + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/ValueLong.java b/api/all/src/main/java/io/opentelemetry/api/common/ValueLong.java new file mode 100644 index 00000000000..8cd1bca4bf9 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/ValueLong.java @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +import java.util.Objects; + +final class ValueLong implements Value { + + private final long value; + + private ValueLong(long value) { + this.value = value; + } + + static Value create(long value) { + return new ValueLong(value); + } + + @Override + public ValueType getType() { + return ValueType.LONG; + } + + @Override + public Long getValue() { + return value; + } + + @Override + public String asString() { + return String.valueOf(value); + } + + @Override + public String toString() { + return "ValueLong{" + asString() + "}"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + return (o instanceof Value) && Objects.equals(this.value, ((Value) 
o).getValue()); + } + + @Override + public int hashCode() { + return Long.hashCode(value); + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/ValueString.java b/api/all/src/main/java/io/opentelemetry/api/common/ValueString.java new file mode 100644 index 00000000000..726cb27dee3 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/ValueString.java @@ -0,0 +1,55 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +import java.util.Objects; + +final class ValueString implements Value { + + private final String value; + + private ValueString(String value) { + this.value = value; + } + + static Value create(String value) { + Objects.requireNonNull(value, "value must not be null"); + return new ValueString(value); + } + + @Override + public ValueType getType() { + return ValueType.STRING; + } + + @Override + public String getValue() { + return value; + } + + @Override + public String asString() { + return value; + } + + @Override + public String toString() { + return "ValueString{" + value + "}"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + return (o instanceof Value) && Objects.equals(this.value, ((Value) o).getValue()); + } + + @Override + public int hashCode() { + return value.hashCode(); + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/common/ValueType.java b/api/all/src/main/java/io/opentelemetry/api/common/ValueType.java new file mode 100644 index 00000000000..d7a60722a55 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/common/ValueType.java @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.common; + +/** + * AnyValue type options, mirroring AnyValue#value + * options. + * + * @since 1.42.0 + */ +public enum ValueType { + STRING, + BOOLEAN, + LONG, + DOUBLE, + ARRAY, + KEY_VALUE_LIST, + BYTES +} diff --git a/api/all/src/main/java/io/opentelemetry/api/internal/ConfigUtil.java b/api/all/src/main/java/io/opentelemetry/api/internal/ConfigUtil.java index 56d2d378d72..3a10a23cb99 100644 --- a/api/all/src/main/java/io/opentelemetry/api/internal/ConfigUtil.java +++ b/api/all/src/main/java/io/opentelemetry/api/internal/ConfigUtil.java @@ -5,8 +5,10 @@ package io.opentelemetry.api.internal; +import java.util.ConcurrentModificationException; import java.util.Locale; import java.util.Map; +import java.util.Properties; import javax.annotation.Nullable; /** @@ -19,6 +21,17 @@ public final class ConfigUtil { private ConfigUtil() {} + /** + * Returns a copy of system properties which is safe to iterate over. + * + *
<p>
In java 8 and android environments, iterating through system properties may trigger {@link + * ConcurrentModificationException}. This method ensures callers can iterate safely without risk + * of exception. See https://github.com/open-telemetry/opentelemetry-java/issues/6732 for details. + */ + public static Properties safeSystemProperties() { + return (Properties) System.getProperties().clone(); + } + /** * Return the system property or environment variable for the {@code key}. * @@ -33,8 +46,9 @@ private ConfigUtil() {} */ public static String getString(String key, String defaultValue) { String normalizedKey = normalizePropertyKey(key); + String systemProperty = - System.getProperties().entrySet().stream() + safeSystemProperties().entrySet().stream() .filter(entry -> normalizedKey.equals(normalizePropertyKey(entry.getKey().toString()))) .map(entry -> entry.getValue().toString()) .findFirst() diff --git a/api/all/src/main/java/io/opentelemetry/api/internal/ImmutableKeyValuePairs.java b/api/all/src/main/java/io/opentelemetry/api/internal/ImmutableKeyValuePairs.java index a2590633b83..4e6b19cee36 100644 --- a/api/all/src/main/java/io/opentelemetry/api/internal/ImmutableKeyValuePairs.java +++ b/api/all/src/main/java/io/opentelemetry/api/internal/ImmutableKeyValuePairs.java @@ -274,4 +274,13 @@ public String toString() { sb.append("}"); return sb.toString(); } + + /** + * Return the backing data array for these attributes. This is only exposed for internal use by + * opentelemetry authors. The contents of the array MUST NOT be modified. + */ + @SuppressWarnings("AvoidObjectArrays") + public Object[] getData() { + return data; + } } diff --git a/api/all/src/main/java/io/opentelemetry/api/internal/IncubatingUtil.java b/api/all/src/main/java/io/opentelemetry/api/internal/IncubatingUtil.java new file mode 100644 index 00000000000..1ef82d373f2 --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/internal/IncubatingUtil.java @@ -0,0 +1,29 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.internal; + +import java.lang.reflect.Method; + +/** + * Incubating utilities. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public class IncubatingUtil { + private IncubatingUtil() {} + + @SuppressWarnings("unchecked") + public static T incubatingApiIfAvailable(T stableApi, String incubatingClassName) { + try { + Class incubatingClass = Class.forName(incubatingClassName); + Method getInstance = incubatingClass.getDeclaredMethod("getNoop"); + return (T) getInstance.invoke(null); + } catch (Exception e) { + return stableApi; + } + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/internal/InstrumentationUtil.java b/api/all/src/main/java/io/opentelemetry/api/internal/InstrumentationUtil.java new file mode 100644 index 00000000000..4f5c1e676bf --- /dev/null +++ b/api/all/src/main/java/io/opentelemetry/api/internal/InstrumentationUtil.java @@ -0,0 +1,40 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.internal; + +import io.opentelemetry.context.Context; +import io.opentelemetry.context.ContextKey; +import java.util.Objects; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. + */ +public final class InstrumentationUtil { + private static final ContextKey SUPPRESS_INSTRUMENTATION_KEY = + ContextKey.named("suppress_instrumentation"); + + private InstrumentationUtil() {} + + /** + * Adds a Context boolean key that will allow to identify HTTP calls coming from OTel exporters. + * The key later be checked by an automatic instrumentation to avoid tracing OTel exporter's + * calls. + */ + public static void suppressInstrumentation(Runnable runnable) { + Context.current().with(SUPPRESS_INSTRUMENTATION_KEY, true).wrap(runnable).run(); + } + + /** + * Checks if an automatic instrumentation should be suppressed with the provided Context. + * + * @return TRUE to suppress the automatic instrumentation, FALSE to continue with the + * instrumentation. + */ + public static boolean shouldSuppressInstrumentation(Context context) { + return Objects.equals(context.get(SUPPRESS_INSTRUMENTATION_KEY), true); + } +} diff --git a/api/all/src/main/java/io/opentelemetry/api/internal/OtelEncodingUtils.java b/api/all/src/main/java/io/opentelemetry/api/internal/OtelEncodingUtils.java index ba8e99fe1c3..bb6a5484488 100644 --- a/api/all/src/main/java/io/opentelemetry/api/internal/OtelEncodingUtils.java +++ b/api/all/src/main/java/io/opentelemetry/api/internal/OtelEncodingUtils.java @@ -89,10 +89,15 @@ public static void longToBase16String(long value, char[] dest, int destOffset) { /** Returns the {@code byte[]} decoded from the given hex {@link CharSequence}. */ public static byte[] bytesFromBase16(CharSequence value, int length) { byte[] result = new byte[length / 2]; + bytesFromBase16(value, length, result); + return result; + } + + /** Fills {@code bytes} with bytes decoded from the given hex {@link CharSequence}. */ + public static void bytesFromBase16(CharSequence value, int length, byte[] bytes) { for (int i = 0; i < length; i += 2) { - result[i / 2] = byteFromBase16(value.charAt(i), value.charAt(i + 1)); + bytes[i / 2] = byteFromBase16(value.charAt(i), value.charAt(i + 1)); } - return result; } /** Fills {@code dest} with the hex encoding of {@code bytes}. 
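As a hedged aside on the decoding change above: the new fill-style overload lets a caller reuse its own output buffer instead of allocating one per call. The class, method, and buffer names below are invented for the sketch and are not part of the patch; the 32-character hex input is an assumed example.

    import io.opentelemetry.api.internal.OtelEncodingUtils;

    class HexDecodeExample {                          // hypothetical example class
      byte[] decodeTraceId(CharSequence traceIdHex) { // assumes 32 lowercase hex chars
        byte[] traceIdBytes = new byte[16];           // caller-owned buffer, reusable across calls
        OtelEncodingUtils.bytesFromBase16(traceIdHex, 32, traceIdBytes);
        return traceIdBytes;
      }
    }
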
*/ diff --git a/api/all/src/main/java/io/opentelemetry/api/logs/DefaultLogger.java b/api/all/src/main/java/io/opentelemetry/api/logs/DefaultLogger.java index 47644104dcb..56284b43cb2 100644 --- a/api/all/src/main/java/io/opentelemetry/api/logs/DefaultLogger.java +++ b/api/all/src/main/java/io/opentelemetry/api/logs/DefaultLogger.java @@ -6,6 +6,7 @@ package io.opentelemetry.api.logs; import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Value; import io.opentelemetry.context.Context; import java.time.Instant; import java.util.concurrent.TimeUnit; @@ -70,6 +71,11 @@ public LogRecordBuilder setBody(String body) { return this; } + @Override + public LogRecordBuilder setBody(Value body) { + return this; + } + @Override public LogRecordBuilder setAttribute(AttributeKey key, T value) { return this; diff --git a/api/all/src/main/java/io/opentelemetry/api/logs/LogRecordBuilder.java b/api/all/src/main/java/io/opentelemetry/api/logs/LogRecordBuilder.java index 1b7e24d19a9..9e071baed1d 100644 --- a/api/all/src/main/java/io/opentelemetry/api/logs/LogRecordBuilder.java +++ b/api/all/src/main/java/io/opentelemetry/api/logs/LogRecordBuilder.java @@ -5,8 +5,14 @@ package io.opentelemetry.api.logs; +import static io.opentelemetry.api.common.AttributeKey.booleanKey; +import static io.opentelemetry.api.common.AttributeKey.doubleKey; +import static io.opentelemetry.api.common.AttributeKey.longKey; +import static io.opentelemetry.api.common.AttributeKey.stringKey; + import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; import io.opentelemetry.context.Context; import java.time.Instant; import java.util.concurrent.TimeUnit; @@ -66,9 +72,23 @@ public interface LogRecordBuilder { /** Set the severity text. */ LogRecordBuilder setSeverityText(String severityText); - /** Set the body string. */ + /** + * Set the body string. + * + *
<p>
Shorthand for calling {@link #setBody(Value)} with {@link Value#of(String)}. + */ LogRecordBuilder setBody(String body); + /** + * Set the body {@link Value}. + * + * @since 1.42.0 + */ + default LogRecordBuilder setBody(Value body) { + setBody(body.asString()); + return this; + } + /** * Sets attributes. If the {@link LogRecordBuilder} previously contained a mapping for any of the * keys, the old values are replaced by the specified values. @@ -83,9 +103,96 @@ default LogRecordBuilder setAllAttributes(Attributes attributes) { return this; } - /** Sets an attribute. */ + /** + * Sets an attribute on the {@code LogRecord}. If the {@code LogRecord} previously contained a + * mapping for the key, the old value is replaced by the specified value. + * + * @param key the key for this attribute. + * @param value the value for this attribute. + * @return this. + */ LogRecordBuilder setAttribute(AttributeKey key, T value); + /** + * Sets a String attribute on the {@code LogRecord}. If the {@code LogRecord} previously contained + * a mapping for the key, the old value is replaced by the specified value. + * + *
<p>
Note: It is strongly recommended to use {@link #setAttribute(AttributeKey, Object)}, and + * pre-allocate your keys, if possible. + * + * @param key the key for this attribute. + * @param value the value for this attribute. + * @return this. + * @since 1.48.0 + */ + default LogRecordBuilder setAttribute(String key, String value) { + return setAttribute(stringKey(key), value); + } + + /** + * Sets a Long attribute on the {@code LogRecord}. If the {@code LogRecord} previously contained a + * mapping for the key, the old value is replaced by the specified value. + * + *
<p>
Note: It is strongly recommended to use {@link #setAttribute(AttributeKey, Object)}, and + * pre-allocate your keys, if possible. + * + * @param key the key for this attribute. + * @param value the value for this attribute. + * @return this. + * @since 1.48.0 + */ + default LogRecordBuilder setAttribute(String key, long value) { + return setAttribute(longKey(key), value); + } + + /** + * Sets a Double attribute on the {@code LogRecord}. If the {@code LogRecord} previously contained + * a mapping for the key, the old value is replaced by the specified value. + * + *
<p>
Note: It is strongly recommended to use {@link #setAttribute(AttributeKey, Object)}, and + * pre-allocate your keys, if possible. + * + * @param key the key for this attribute. + * @param value the value for this attribute. + * @return this. + * @since 1.48.0 + */ + default LogRecordBuilder setAttribute(String key, double value) { + return setAttribute(doubleKey(key), value); + } + + /** + * Sets a Boolean attribute on the {@code LogRecord}. If the {@code LogRecord} previously + * contained a mapping for the key, the old value is replaced by the specified value. + * + *
<p>
Note: It is strongly recommended to use {@link #setAttribute(AttributeKey, Object)}, and + * pre-allocate your keys, if possible. + * + * @param key the key for this attribute. + * @param value the value for this attribute. + * @return this. + * @since 1.48.0 + */ + default LogRecordBuilder setAttribute(String key, boolean value) { + return setAttribute(booleanKey(key), value); + } + + /** + * Sets an Integer attribute on the {@code LogRecord}. If the {@code LogRecord} previously + * contained a mapping for the key, the old value is replaced by the specified value. + * + *
<p>
Note: It is strongly recommended to use {@link #setAttribute(AttributeKey, Object)}, and + * pre-allocate your keys, if possible. + * + * @param key the key for this attribute. + * @param value the value for this attribute. + * @return this. + * @since 1.48.0 + */ + default LogRecordBuilder setAttribute(String key, int value) { + return setAttribute(key, (long) value); + } + /** Emit the log record. */ void emit(); } diff --git a/api/all/src/main/java/io/opentelemetry/api/logs/LoggerProvider.java b/api/all/src/main/java/io/opentelemetry/api/logs/LoggerProvider.java index d00cb310ffc..5bad7eeee51 100644 --- a/api/all/src/main/java/io/opentelemetry/api/logs/LoggerProvider.java +++ b/api/all/src/main/java/io/opentelemetry/api/logs/LoggerProvider.java @@ -5,6 +5,7 @@ package io.opentelemetry.api.logs; +import io.opentelemetry.api.internal.IncubatingUtil; import javax.annotation.concurrent.ThreadSafe; /** @@ -43,6 +44,8 @@ default Logger get(String instrumentationScopeName) { /** Returns a no-op {@link LoggerProvider} which provides Loggers which do not record or emit. */ static LoggerProvider noop() { - return DefaultLoggerProvider.getInstance(); + return IncubatingUtil.incubatingApiIfAvailable( + DefaultLoggerProvider.getInstance(), + "io.opentelemetry.api.incubator.logs.ExtendedDefaultLoggerProvider"); } } diff --git a/api/all/src/main/java/io/opentelemetry/api/metrics/DefaultMeter.java b/api/all/src/main/java/io/opentelemetry/api/metrics/DefaultMeter.java index 9cab67f239d..fff36ae9cbb 100644 --- a/api/all/src/main/java/io/opentelemetry/api/metrics/DefaultMeter.java +++ b/api/all/src/main/java/io/opentelemetry/api/metrics/DefaultMeter.java @@ -320,8 +320,10 @@ public LongHistogram build() { } private static class NoopDoubleGaugeBuilder implements DoubleGaugeBuilder { - private static final ObservableDoubleGauge NOOP = new ObservableDoubleGauge() {}; + private static final ObservableDoubleGauge NOOP_OBSERVABLE_GAUGE = + new ObservableDoubleGauge() {}; private static final LongGaugeBuilder NOOP_LONG_GAUGE_BUILDER = new NoopLongGaugeBuilder(); + private static final NoopDoubleGauge NOOP_GAUGE = new NoopDoubleGauge(); @Override public DoubleGaugeBuilder setDescription(String description) { @@ -340,17 +342,34 @@ public LongGaugeBuilder ofLongs() { @Override public ObservableDoubleGauge buildWithCallback(Consumer callback) { - return NOOP; + return NOOP_OBSERVABLE_GAUGE; } @Override public ObservableDoubleMeasurement buildObserver() { return NOOP_OBSERVABLE_DOUBLE_MEASUREMENT; } + + @Override + public DoubleGauge build() { + return NOOP_GAUGE; + } + } + + private static class NoopDoubleGauge implements DoubleGauge { + @Override + public void set(double value) {} + + @Override + public void set(double value, Attributes attributes) {} + + @Override + public void set(double value, Attributes attributes, Context context) {} } private static class NoopLongGaugeBuilder implements LongGaugeBuilder { - private static final ObservableLongGauge NOOP = new ObservableLongGauge() {}; + private static final ObservableLongGauge NOOP_OBSERVABLE_GAUGE = new ObservableLongGauge() {}; + private static final NoopLongGauge NOOP_GAUGE = new NoopLongGauge(); @Override public LongGaugeBuilder setDescription(String description) { @@ -364,13 +383,29 @@ public LongGaugeBuilder setUnit(String unit) { @Override public ObservableLongGauge buildWithCallback(Consumer callback) { - return NOOP; + return NOOP_OBSERVABLE_GAUGE; } @Override public ObservableLongMeasurement buildObserver() { return NOOP_OBSERVABLE_LONG_MEASUREMENT; 
} + + @Override + public LongGauge build() { + return NOOP_GAUGE; + } + } + + private static class NoopLongGauge implements LongGauge { + @Override + public void set(long value) {} + + @Override + public void set(long value, Attributes attributes) {} + + @Override + public void set(long value, Attributes attributes, Context context) {} } private static class NoopObservableDoubleMeasurement implements ObservableDoubleMeasurement { diff --git a/api/all/src/main/java/io/opentelemetry/api/metrics/DefaultMeterProvider.java b/api/all/src/main/java/io/opentelemetry/api/metrics/DefaultMeterProvider.java index 6d1a6de3d48..3ea78ec2d34 100644 --- a/api/all/src/main/java/io/opentelemetry/api/metrics/DefaultMeterProvider.java +++ b/api/all/src/main/java/io/opentelemetry/api/metrics/DefaultMeterProvider.java @@ -5,6 +5,8 @@ package io.opentelemetry.api.metrics; +import io.opentelemetry.api.internal.IncubatingUtil; + /** A {@link MeterProvider} that does nothing. */ class DefaultMeterProvider implements MeterProvider { @Override @@ -12,7 +14,10 @@ public MeterBuilder meterBuilder(String instrumentationScopeName) { return BUILDER_INSTANCE; } - private static final DefaultMeterProvider INSTANCE = new DefaultMeterProvider(); + private static final MeterProvider INSTANCE = + IncubatingUtil.incubatingApiIfAvailable( + new DefaultMeterProvider(), + "io.opentelemetry.api.incubator.metrics.ExtendedDefaultMeterProvider"); private static final MeterBuilder BUILDER_INSTANCE = new NoopMeterBuilder(); static MeterProvider getInstance() { diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleGauge.java b/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleGauge.java similarity index 53% rename from extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleGauge.java rename to api/all/src/main/java/io/opentelemetry/api/metrics/DoubleGauge.java index 1842f7d90e1..a755ff25b16 100644 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleGauge.java +++ b/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleGauge.java @@ -3,12 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.extension.incubator.metrics; +package io.opentelemetry.api.metrics; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.context.Context; import javax.annotation.concurrent.ThreadSafe; -/** A gauge instrument that synchronously records {@code double} values. */ +/** + * A gauge instrument that synchronously records {@code double} values. + * + * @since 1.38.0 + */ @ThreadSafe public interface DoubleGauge { /** @@ -26,5 +31,12 @@ public interface DoubleGauge { */ void set(double value, Attributes attributes); - // TODO(jack-berg): should we add overload with Context argument? + /** + * Records a value with a set of attributes. + * + * @param value The current gauge value. + * @param attributes A set of attributes to associate with the value. + * @param context The explicit context to associate with this measurement. 
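For orientation, a minimal sketch of how the new synchronous gauge API might be exercised once a meter is available. This is not part of the change itself; the scope name, instrument name, and attribute key are assumptions made for the example.

    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.common.AttributeKey;
    import io.opentelemetry.api.common.Attributes;
    import io.opentelemetry.api.metrics.DoubleGauge;
    import io.opentelemetry.api.metrics.Meter;

    class GaugeExample {                                             // hypothetical example class
      void recordTemperature() {
        Meter meter = GlobalOpenTelemetry.getMeter("example-scope"); // scope name is an assumption
        DoubleGauge temperature =
            meter.gaugeBuilder("room.temperature").setUnit("C").build();
        // Record the latest value as it is observed; with the SDK's default last-value
        // aggregation only the most recent measurement is exported.
        temperature.set(21.5, Attributes.of(AttributeKey.stringKey("room"), "kitchen"));
      }
    }
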
+ */ + void set(double value, Attributes attributes, Context context); } diff --git a/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleGaugeBuilder.java b/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleGaugeBuilder.java index f73f0133d66..b0ea8b939fa 100644 --- a/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleGaugeBuilder.java +++ b/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleGaugeBuilder.java @@ -65,4 +65,21 @@ public interface DoubleGaugeBuilder { default ObservableDoubleMeasurement buildObserver() { return DefaultMeter.getInstance().gaugeBuilder("noop").buildObserver(); } + + /** + * Builds and returns a DoubleGauge instrument with the configuration. + * + *
<p>
NOTE: This produces a synchronous gauge which records gauge values as they occur. Most users + * will want to instead register an {@link #buildWithCallback(Consumer)} to asynchronously observe + * the value of the gauge when metrics are collected. + * + *
<p>
If using the OpenTelemetry SDK, by default gauges use last value aggregation, such that only + * the value of the last recorded measurement is exported. + * + * @return The DoubleGauge instrument. + * @since 1.38.0 + */ + default DoubleGauge build() { + return DefaultMeter.getInstance().gaugeBuilder("noop").build(); + } } diff --git a/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleHistogramBuilder.java b/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleHistogramBuilder.java index 68d89a3302b..bc93b3c9e3f 100644 --- a/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleHistogramBuilder.java +++ b/api/all/src/main/java/io/opentelemetry/api/metrics/DoubleHistogramBuilder.java @@ -5,6 +5,8 @@ package io.opentelemetry.api.metrics; +import java.util.List; + /** * Builder class for {@link DoubleHistogram}. * @@ -32,6 +34,20 @@ public interface DoubleHistogramBuilder { */ DoubleHistogramBuilder setUnit(String unit); + /** + * Set the explicit bucket buckets boundaries advice, which suggests the recommended set of + * explicit bucket boundaries for this histogram. + * + * @param bucketBoundaries The explicit bucket boundaries advice. + * @see Explicit + * bucket boundaries advisory parameter + * @since 1.32.0 + */ + default DoubleHistogramBuilder setExplicitBucketBoundariesAdvice(List bucketBoundaries) { + return this; + } + /** Sets the Counter for recording {@code long} values. */ LongHistogramBuilder ofLongs(); diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongGauge.java b/api/all/src/main/java/io/opentelemetry/api/metrics/LongGauge.java similarity index 53% rename from extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongGauge.java rename to api/all/src/main/java/io/opentelemetry/api/metrics/LongGauge.java index 1aba6d1d160..018f60e323b 100644 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongGauge.java +++ b/api/all/src/main/java/io/opentelemetry/api/metrics/LongGauge.java @@ -3,12 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.extension.incubator.metrics; +package io.opentelemetry.api.metrics; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.context.Context; import javax.annotation.concurrent.ThreadSafe; -/** A gauge instrument that synchronously records {@code long} values. */ +/** + * A gauge instrument that synchronously records {@code long} values. + * + * @since 1.38.0 + */ @ThreadSafe public interface LongGauge { /** @@ -26,5 +31,12 @@ public interface LongGauge { */ void set(long value, Attributes attributes); - // TODO(jack-berg): should we add overload with Context argument? + /** + * Records a value with a set of attributes. + * + * @param value The current gauge value. + * @param attributes A set of attributes to associate with the value. + * @param context The explicit context to associate with this measurement. 
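Similarly, a small hedged sketch of the explicit-bucket-boundaries advice added to the histogram builders; the scope name, instrument name, and boundary values are illustrative assumptions, not taken from the patch.

    import java.util.Arrays;
    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.metrics.DoubleHistogram;
    import io.opentelemetry.api.metrics.Meter;

    class HistogramAdviceExample {                                   // hypothetical example class
      void recordLatency() {
        Meter meter = GlobalOpenTelemetry.getMeter("example-scope"); // scope name is an assumption
        DoubleHistogram latency =
            meter.histogramBuilder("request.duration")               // instrument name is an assumption
                .setUnit("s")
                // Advice only: an SDK may honor these boundaries or fall back to its defaults.
                .setExplicitBucketBoundariesAdvice(Arrays.asList(0.01, 0.05, 0.1, 0.5, 1.0))
                .build();
        latency.record(0.042);
      }
    }
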
+ */ + void set(long value, Attributes attributes, Context context); } diff --git a/api/all/src/main/java/io/opentelemetry/api/metrics/LongGaugeBuilder.java b/api/all/src/main/java/io/opentelemetry/api/metrics/LongGaugeBuilder.java index b4eb490906f..f33c53155e7 100644 --- a/api/all/src/main/java/io/opentelemetry/api/metrics/LongGaugeBuilder.java +++ b/api/all/src/main/java/io/opentelemetry/api/metrics/LongGaugeBuilder.java @@ -62,4 +62,21 @@ public interface LongGaugeBuilder { default ObservableLongMeasurement buildObserver() { return DefaultMeter.getInstance().gaugeBuilder("noop").ofLongs().buildObserver(); } + + /** + * Builds and returns a LongGauge instrument with the configuration. + * + *
<p>
NOTE: This produces a synchronous gauge which records gauge values as they occur. Most users + * will want to instead register an {@link #buildWithCallback(Consumer)} to asynchronously observe + * the value of the gauge when metrics are collected. + * + *
<p>
If using the OpenTelemetry SDK, by default gauges use last value aggregation, such that only + * the value of the last recorded measurement is exported. + * + * @return The LongGauge instrument. + * @since 1.38.0 + */ + default LongGauge build() { + return DefaultMeter.getInstance().gaugeBuilder("noop").ofLongs().build(); + } } diff --git a/api/all/src/main/java/io/opentelemetry/api/metrics/LongHistogramBuilder.java b/api/all/src/main/java/io/opentelemetry/api/metrics/LongHistogramBuilder.java index 80f59753457..baad3928346 100644 --- a/api/all/src/main/java/io/opentelemetry/api/metrics/LongHistogramBuilder.java +++ b/api/all/src/main/java/io/opentelemetry/api/metrics/LongHistogramBuilder.java @@ -5,6 +5,8 @@ package io.opentelemetry.api.metrics; +import java.util.List; + /** * Builder class for {@link LongHistogram}. * @@ -31,6 +33,20 @@ public interface LongHistogramBuilder { */ LongHistogramBuilder setUnit(String unit); + /** + * Set the explicit bucket buckets boundaries advice, which suggests the recommended set of + * explicit bucket boundaries for this histogram. + * + * @param bucketBoundaries The explicit bucket boundaries advice. + * @see Explicit + * bucket boundaries advisory parameter + * @since 1.32.0 + */ + default LongHistogramBuilder setExplicitBucketBoundariesAdvice(List bucketBoundaries) { + return this; + } + /** * Builds and returns a Histogram instrument with the configuration. * diff --git a/api/all/src/main/java/io/opentelemetry/api/metrics/Meter.java b/api/all/src/main/java/io/opentelemetry/api/metrics/Meter.java index a640334d4d5..d8566457d4c 100644 --- a/api/all/src/main/java/io/opentelemetry/api/metrics/Meter.java +++ b/api/all/src/main/java/io/opentelemetry/api/metrics/Meter.java @@ -65,7 +65,7 @@ public interface Meter { * callbacks). * * @param name the name of the Counter. Instrument names must consist of 255 or fewer characters - * including alphanumeric, _, ., -, and start with a letter. + * including alphanumeric, _, ., -, /, and start with a letter. * @return a builder for configuring a Counter instrument. Defaults to recording long values, but * may be changed. * @see Links are used to link {@link Span}s in different traces. Used (for example) in batching + * operations, where a single batch handler processes multiple requests from different traces or + * the same trace. + * + *
<p>
Implementations may ignore calls with an {@linkplain SpanContext#isValid() invalid span + * context}. + * + *
<p>
Callers should prefer to add links before starting the span via {@link + * SpanBuilder#addLink(SpanContext)} if possible. + * + * @param spanContext the context of the linked {@code Span}. + * @return this. + * @since 1.37.0 + */ + default Span addLink(SpanContext spanContext) { + return addLink(spanContext, Attributes.empty()); + } + + /** + * Adds a link to this {@code Span}. + * + *
<p>
Links are used to link {@link Span}s in different traces. Used (for example) in batching + * operations, where a single batch handler processes multiple requests from different traces or + * the same trace. + * + *
<p>
Implementations may ignore calls with an {@linkplain SpanContext#isValid() invalid span + * context}. + * + *
<p>
Callers should prefer to add links before starting the span via {@link + * SpanBuilder#addLink(SpanContext, Attributes)} if possible. + * + * @param spanContext the context of the linked {@code Span}. + * @param attributes the attributes of the {@code Link}. + * @return this. + * @since 1.37.0 + */ + default Span addLink(SpanContext spanContext, Attributes attributes) { + return this; + } + /** * Marks the end of {@code Span} execution. * diff --git a/api/all/src/main/java/io/opentelemetry/api/trace/SpanBuilder.java b/api/all/src/main/java/io/opentelemetry/api/trace/SpanBuilder.java index 3b1726933a8..c8727dd8471 100644 --- a/api/all/src/main/java/io/opentelemetry/api/trace/SpanBuilder.java +++ b/api/all/src/main/java/io/opentelemetry/api/trace/SpanBuilder.java @@ -238,6 +238,19 @@ public interface SpanBuilder { */ SpanBuilder setAttribute(AttributeKey key, T value); + /** + * Sets an attribute to the newly created {@code Span}. If {@code SpanBuilder} previously + * contained a mapping for the key, the old value is replaced by the specified value. + * + * @param key the key for this attribute. + * @param value the value for this attribute. + * @return this. + * @since 1.45.0 + */ + default SpanBuilder setAttribute(AttributeKey key, int value) { + return setAttribute(key, (long) value); + } + /** * Sets attributes to the {@link SpanBuilder}. If the {@link SpanBuilder} previously contained a * mapping for any of the keys, the old values are replaced by the specified values. diff --git a/api/all/src/main/java/io/opentelemetry/api/trace/package-info.java b/api/all/src/main/java/io/opentelemetry/api/trace/package-info.java index 2970a7eeadf..1a6e4789fb7 100644 --- a/api/all/src/main/java/io/opentelemetry/api/trace/package-info.java +++ b/api/all/src/main/java/io/opentelemetry/api/trace/package-info.java @@ -18,7 +18,6 @@ * io.opentelemetry.context.Context} and between process using one of the wire propagation formats * supported in the {@code opentelemetry.trace.propagation} package. */ -// TODO: Add code examples. 
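A brief usage sketch, not taken from this change, of the new after-start link method together with the int-valued span attribute overload introduced above; the class name, scope name, span name, and attribute key are assumptions made for the example.

    import java.util.List;
    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.common.AttributeKey;
    import io.opentelemetry.api.trace.Span;
    import io.opentelemetry.api.trace.SpanContext;
    import io.opentelemetry.api.trace.Tracer;

    class BatchSpanExample {                                               // hypothetical example class
      void processBatch(List<SpanContext> incomingRequestContexts) {       // contexts supplied by the caller
        Tracer tracer = GlobalOpenTelemetry.getTracer("batch-processor");  // scope name is an assumption
        Span batchSpan =
            tracer.spanBuilder("process-batch")
                // Uses the new int overload, which widens the value to long.
                .setAttribute(AttributeKey.longKey("batch.size"), incomingRequestContexts.size())
                .startSpan();
        // Requests may only become known after the span has started; link them as they arrive.
        for (SpanContext linkedContext : incomingRequestContexts) {
          batchSpan.addLink(linkedContext);
        }
        batchSpan.end();
      }
    }
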
@ParametersAreNonnullByDefault package io.opentelemetry.api.trace; diff --git a/api/all/src/main/resources/META-INF/native-image/io.opentelemetry/opentelemetry-api/resource-config.json b/api/all/src/main/resources/META-INF/native-image/io.opentelemetry/opentelemetry-api/resource-config.json new file mode 100644 index 00000000000..a8c57fbdb83 --- /dev/null +++ b/api/all/src/main/resources/META-INF/native-image/io.opentelemetry/opentelemetry-api/resource-config.json @@ -0,0 +1,10 @@ +{ + "resources": { + "includes": [ + { + "pattern":"\\Qio/opentelemetry/api/version.properties\\E" + } + ] + }, + "bundles": [] +} diff --git a/api/all/src/test/java/io/opentelemetry/api/OpenTelemetryTest.java b/api/all/src/test/java/io/opentelemetry/api/OpenTelemetryTest.java index 42a4cd31476..f985cffe2ed 100644 --- a/api/all/src/test/java/io/opentelemetry/api/OpenTelemetryTest.java +++ b/api/all/src/test/java/io/opentelemetry/api/OpenTelemetryTest.java @@ -5,100 +5,25 @@ package io.opentelemetry.api; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.Mockito.mock; - import io.opentelemetry.api.logs.LoggerProvider; import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.api.testing.internal.AbstractOpenTelemetryTest; import io.opentelemetry.api.trace.TracerProvider; -import io.opentelemetry.context.propagation.ContextPropagators; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -class OpenTelemetryTest { - - @BeforeAll - static void beforeClass() { - GlobalOpenTelemetry.resetForTest(); - } - - @AfterEach - void after() { - GlobalOpenTelemetry.resetForTest(); - } - - @Test - void testDefault() { - assertThat(OpenTelemetry.noop().getTracerProvider()).isSameAs(TracerProvider.noop()); - assertThat(OpenTelemetry.noop().getPropagators()).isSameAs(ContextPropagators.noop()); - assertThat(OpenTelemetry.noop().getMeterProvider()).isSameAs(MeterProvider.noop()); - assertThat(OpenTelemetry.noop().getLogsBridge()).isSameAs(LoggerProvider.noop()); - } - @Test - void propagating() { - ContextPropagators contextPropagators = mock(ContextPropagators.class); - OpenTelemetry openTelemetry = OpenTelemetry.propagating(contextPropagators); - - assertThat(openTelemetry.getTracerProvider()).isSameAs(TracerProvider.noop()); - assertThat(openTelemetry.getMeterProvider()).isSameAs(MeterProvider.noop()); - assertThat(openTelemetry.getLogsBridge()).isSameAs(LoggerProvider.noop()); - assertThat(openTelemetry.getPropagators()).isSameAs(contextPropagators); - } - - @Test - void testGlobalBeforeSet() { - assertThat(GlobalOpenTelemetry.getTracerProvider()).isSameAs(TracerProvider.noop()); - assertThat(GlobalOpenTelemetry.getTracerProvider()) - .isSameAs(GlobalOpenTelemetry.getTracerProvider()); - assertThat(GlobalOpenTelemetry.getPropagators()).isSameAs(GlobalOpenTelemetry.getPropagators()); - } - - @Test - void independentNonGlobalPropagators() { - ContextPropagators propagators1 = mock(ContextPropagators.class); - OpenTelemetry otel1 = OpenTelemetry.propagating(propagators1); - ContextPropagators propagators2 = mock(ContextPropagators.class); - OpenTelemetry otel2 = OpenTelemetry.propagating(propagators2); - - assertThat(otel1.getPropagators()).isSameAs(propagators1); - assertThat(otel2.getPropagators()).isSameAs(propagators2); - } - - @Test - void setThenSet() { - setOpenTelemetry(); - assertThatThrownBy(() -> 
GlobalOpenTelemetry.set(OpenTelemetry.noop())) - .isInstanceOf(IllegalStateException.class) - .hasMessageContaining("GlobalOpenTelemetry.set has already been called") - .hasStackTraceContaining("setOpenTelemetry"); - } - - @Test - void getThenSet() { - assertThat(getOpenTelemetry()).isInstanceOf(DefaultOpenTelemetry.class); - assertThatThrownBy(() -> GlobalOpenTelemetry.set(OpenTelemetry.noop())) - .isInstanceOf(IllegalStateException.class) - .hasMessageContaining("GlobalOpenTelemetry.set has already been called") - .hasStackTraceContaining("getOpenTelemetry"); - } +class OpenTelemetryTest extends AbstractOpenTelemetryTest { - @Test - void toString_noop_Valid() { - assertThat(OpenTelemetry.noop().toString()) - .isEqualTo( - "DefaultOpenTelemetry{" - + "propagators=DefaultContextPropagators{textMapPropagator=NoopTextMapPropagator}" - + "}"); + @Override + protected TracerProvider getTracerProvider() { + return TracerProvider.noop(); } - private static void setOpenTelemetry() { - GlobalOpenTelemetry.set(OpenTelemetry.noop()); + @Override + protected MeterProvider getMeterProvider() { + return MeterProvider.noop(); } - private static OpenTelemetry getOpenTelemetry() { - return GlobalOpenTelemetry.get(); + @Override + protected LoggerProvider getLoggerProvider() { + return LoggerProvider.noop(); } } diff --git a/api/all/src/test/java/io/opentelemetry/api/baggage/BaggageTest.java b/api/all/src/test/java/io/opentelemetry/api/baggage/BaggageTest.java index 4c141276e1d..f30ee5703cc 100644 --- a/api/all/src/test/java/io/opentelemetry/api/baggage/BaggageTest.java +++ b/api/all/src/test/java/io/opentelemetry/api/baggage/BaggageTest.java @@ -9,6 +9,10 @@ import io.opentelemetry.context.Context; import io.opentelemetry.context.Scope; +import java.util.HashMap; +import java.util.Map; +import java.util.function.BiConsumer; +import javax.annotation.Nullable; import org.junit.jupiter.api.Test; class BaggageTest { @@ -27,4 +31,45 @@ void current() { assertThat(result.getEntryValue("foo")).isEqualTo("bar"); } } + + @Test + void getEntryDefault() { + BaggageEntryMetadata metadata = BaggageEntryMetadata.create("flib"); + Map result = new HashMap<>(); + result.put("a", ImmutableEntry.create("b", metadata)); + // Implementation that only implements asMap() which is used by getEntry() + Baggage baggage = + new Baggage() { + + @Override + public Map asMap() { + return result; + } + + @Override + public int size() { + return 0; + } + + @Override + public void forEach(BiConsumer consumer) { + result.forEach(consumer); + } + + @Nullable + @Override + public String getEntryValue(String entryKey) { + return null; + } + + @Override + public BaggageBuilder toBuilder() { + return null; + } + }; + + BaggageEntry entry = baggage.getEntry("a"); + assertThat(entry.getValue()).isEqualTo("b"); + assertThat(entry.getMetadata().getValue()).isEqualTo("flib"); + } } diff --git a/api/all/src/test/java/io/opentelemetry/api/baggage/ImmutableBaggageTest.java b/api/all/src/test/java/io/opentelemetry/api/baggage/ImmutableBaggageTest.java index 311c8bae337..a10d2ca1902 100644 --- a/api/all/src/test/java/io/opentelemetry/api/baggage/ImmutableBaggageTest.java +++ b/api/all/src/test/java/io/opentelemetry/api/baggage/ImmutableBaggageTest.java @@ -9,6 +9,8 @@ import static org.assertj.core.api.Assertions.entry; import com.google.common.testing.EqualsTester; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; import org.junit.jupiter.api.Test; /** @@ -190,4 +192,15 @@ void testEquals() { .addEqualityGroup(baggage2, 
baggage3) .testEquals(); } + + @Test + void getEntry() { + BaggageEntryMetadata metadata = BaggageEntryMetadata.create("flib"); + try (Scope scope = + Context.root().with(Baggage.builder().put("a", "b", metadata).build()).makeCurrent()) { + Baggage result = Baggage.current(); + assertThat(result.getEntry("a").getValue()).isEqualTo("b"); + assertThat(result.getEntry("a").getMetadata().getValue()).isEqualTo("flib"); + } + } } diff --git a/api/all/src/test/java/io/opentelemetry/api/baggage/propagation/W3CBaggagePropagatorTest.java b/api/all/src/test/java/io/opentelemetry/api/baggage/propagation/W3CBaggagePropagatorTest.java index 3fbf7bf5297..fb0c342affc 100644 --- a/api/all/src/test/java/io/opentelemetry/api/baggage/propagation/W3CBaggagePropagatorTest.java +++ b/api/all/src/test/java/io/opentelemetry/api/baggage/propagation/W3CBaggagePropagatorTest.java @@ -8,14 +8,18 @@ import static java.util.Collections.singletonMap; import static org.assertj.core.api.Assertions.assertThat; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.opentelemetry.api.baggage.Baggage; import io.opentelemetry.api.baggage.BaggageEntryMetadata; import io.opentelemetry.context.Context; import io.opentelemetry.context.propagation.TextMapGetter; +import io.opentelemetry.context.propagation.internal.ExtendedTextMapGetter; import java.util.Collections; import java.util.HashMap; +import java.util.Iterator; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import javax.annotation.Nullable; import org.junit.jupiter.api.Test; @@ -36,6 +40,28 @@ public String get(Map carrier, String key) { } }; + private static final ExtendedTextMapGetter>> multiGetter = + new ExtendedTextMapGetter>>() { + @Override + public Iterable keys(Map> carrier) { + return carrier.keySet(); + } + + @Nullable + @Override + public String get(Map> carrier, String key) { + return carrier.getOrDefault(key, Collections.emptyList()).stream() + .findFirst() + .orElse(null); + } + + @Override + public Iterator getAll(Map> carrier, String key) { + List values = carrier.get(key); + return values == null ? 
Collections.emptyIterator() : values.iterator(); + } + }; + @Test void fields() { assertThat(W3CBaggagePropagator.getInstance().fields()).containsExactly("baggage"); @@ -421,6 +447,101 @@ void extract_nullGetter() { .isSameAs(context); } + @Test + void extract_multiple_headers() { + W3CBaggagePropagator propagator = W3CBaggagePropagator.getInstance(); + + Context result = + propagator.extract( + Context.root(), + ImmutableMap.of("baggage", ImmutableList.of("k1=v1", "k2=v2")), + multiGetter); + + Baggage expectedBaggage = Baggage.builder().put("k1", "v1").put("k2", "v2").build(); + assertThat(Baggage.fromContext(result)).isEqualTo(expectedBaggage); + } + + @Test + void extract_multiple_headers_duplicate_key() { + W3CBaggagePropagator propagator = W3CBaggagePropagator.getInstance(); + + Context result = + propagator.extract( + Context.root(), + ImmutableMap.of("baggage", ImmutableList.of("k1=v1", "k1=v2")), + multiGetter); + + Baggage expectedBaggage = Baggage.builder().put("k1", "v2").build(); + assertThat(Baggage.fromContext(result)).isEqualTo(expectedBaggage); + } + + @Test + void extract_multiple_headers_mixed_duplicates_non_duplicates() { + W3CBaggagePropagator propagator = W3CBaggagePropagator.getInstance(); + + Context result = + propagator.extract( + Context.root(), + ImmutableMap.of("baggage", ImmutableList.of("k1=v1,k2=v0", "k2=v2,k3=v3")), + multiGetter); + + Baggage expectedBaggage = + Baggage.builder().put("k1", "v1").put("k2", "v2").put("k3", "v3").build(); + assertThat(Baggage.fromContext(result)).isEqualTo(expectedBaggage); + } + + @Test + void extract_multiple_headers_all_empty() { + W3CBaggagePropagator propagator = W3CBaggagePropagator.getInstance(); + + Context result = + propagator.extract( + Context.root(), ImmutableMap.of("baggage", ImmutableList.of("", "")), multiGetter); + + Baggage expectedBaggage = Baggage.builder().build(); + assertThat(Baggage.fromContext(result)).isEqualTo(expectedBaggage); + } + + @Test + void extract_multiple_headers_some_empty() { + W3CBaggagePropagator propagator = W3CBaggagePropagator.getInstance(); + + Context result = + propagator.extract( + Context.root(), ImmutableMap.of("baggage", ImmutableList.of("", "k=v")), multiGetter); + + Baggage expectedBaggage = Baggage.builder().put("k", "v").build(); + assertThat(Baggage.fromContext(result)).isEqualTo(expectedBaggage); + } + + @Test + void extract_multiple_headers_all_invalid() { + W3CBaggagePropagator propagator = W3CBaggagePropagator.getInstance(); + + Context result = + propagator.extract( + Context.root(), + ImmutableMap.of("baggage", ImmutableList.of("!@#$%^", "key=va%lue")), + multiGetter); + + Baggage expectedBaggage = Baggage.builder().build(); + assertThat(Baggage.fromContext(result)).isEqualTo(expectedBaggage); + } + + @Test + void extract_multiple_headers_some_invalid() { + W3CBaggagePropagator propagator = W3CBaggagePropagator.getInstance(); + + Context result = + propagator.extract( + Context.root(), + ImmutableMap.of("baggage", ImmutableList.of("k1=v1", "key=va%lue", "k2=v2")), + multiGetter); + + Baggage expectedBaggage = Baggage.builder().put("k1", "v1").put("k2", "v2").build(); + assertThat(Baggage.fromContext(result)).isEqualTo(expectedBaggage); + } + @Test void inject_noBaggage() { W3CBaggagePropagator propagator = W3CBaggagePropagator.getInstance(); @@ -448,6 +569,7 @@ void inject() { .put("\2ab\3cd", "wacky key nonprintable") .put(null, "null key") .put("nullvalue", null) + .put(" ", "key is only space") .build(); W3CBaggagePropagator propagator = 
W3CBaggagePropagator.getInstance(); Map carrier = new HashMap<>(); diff --git a/api/all/src/test/java/io/opentelemetry/api/internal/ConfigUtilTest.java b/api/all/src/test/java/io/opentelemetry/api/internal/ConfigUtilTest.java index f7dde6a9735..a546c32862b 100644 --- a/api/all/src/test/java/io/opentelemetry/api/internal/ConfigUtilTest.java +++ b/api/all/src/test/java/io/opentelemetry/api/internal/ConfigUtilTest.java @@ -6,7 +6,15 @@ package io.opentelemetry.api.internal; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; import org.junit.jupiter.api.Test; import org.junitpioneer.jupiter.SetSystemProperty; @@ -56,4 +64,45 @@ void defaultIfnull() { assertThat(ConfigUtil.defaultIfNull("val1", "val2")).isEqualTo("val1"); assertThat(ConfigUtil.defaultIfNull(null, "val2")).isEqualTo("val2"); } + + @Test + @SuppressWarnings("ReturnValueIgnored") + void systemPropertiesConcurrentAccess() throws ExecutionException, InterruptedException { + int threads = 4; + ExecutorService executor = Executors.newFixedThreadPool(threads); + try { + int cycles = 1000; + CountDownLatch latch = new CountDownLatch(1); + List> futures = new ArrayList<>(); + for (int i = 0; i < threads; i++) { + futures.add( + executor.submit( + () -> { + try { + latch.await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + + for (int j = 0; j < cycles; j++) { + String property = "prop " + j; + System.setProperty(property, "a"); + System.getProperties().remove(property); + } + })); + } + + latch.countDown(); + for (int i = 0; i < cycles; i++) { + assertThatCode(() -> ConfigUtil.getString("x", "y")).doesNotThrowAnyException(); + } + + for (Future future : futures) { + future.get(); + } + + } finally { + executor.shutdownNow(); + } + } } diff --git a/api/all/src/test/java/io/opentelemetry/api/internal/InstrumentationUtilTest.java b/api/all/src/test/java/io/opentelemetry/api/internal/InstrumentationUtilTest.java new file mode 100644 index 00000000000..066c906d875 --- /dev/null +++ b/api/all/src/test/java/io/opentelemetry/api/internal/InstrumentationUtilTest.java @@ -0,0 +1,27 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.internal; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.opentelemetry.context.Context; +import org.junit.jupiter.api.Test; + +class InstrumentationUtilTest { + @Test + void verifySuppressInstrumentation() { + // Should be false by default. + assertFalse(InstrumentationUtil.shouldSuppressInstrumentation(Context.current())); + + // Should be true inside the Runnable passed to InstrumentationUtil.suppressInstrumentation. + InstrumentationUtil.suppressInstrumentation( + () -> assertTrue(InstrumentationUtil.shouldSuppressInstrumentation(Context.current()))); + + // Should be false after the runnable finishes. 
+ assertFalse(InstrumentationUtil.shouldSuppressInstrumentation(Context.current())); + } +} diff --git a/api/all/src/test/java/io/opentelemetry/api/logs/DefaultLoggerProviderTest.java b/api/all/src/test/java/io/opentelemetry/api/logs/DefaultLoggerProviderTest.java deleted file mode 100644 index 81a8bec1f84..00000000000 --- a/api/all/src/test/java/io/opentelemetry/api/logs/DefaultLoggerProviderTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.logs; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatCode; - -import org.junit.jupiter.api.Test; - -class DefaultLoggerProviderTest { - - @Test - void noopLoggerProvider_doesNotThrow() { - LoggerProvider provider = LoggerProvider.noop(); - - assertThat(provider).isSameAs(DefaultLoggerProvider.getInstance()); - assertThatCode(() -> provider.get("scope-name")).doesNotThrowAnyException(); - assertThatCode( - () -> - provider - .loggerBuilder("scope-name") - .setInstrumentationVersion("1.0") - .setSchemaUrl("http://schema.com") - .build()) - .doesNotThrowAnyException(); - - assertThatCode(() -> provider.loggerBuilder("scope-name").build().logRecordBuilder()) - .doesNotThrowAnyException(); - } -} diff --git a/api/all/src/test/java/io/opentelemetry/api/logs/DefaultLoggerTest.java b/api/all/src/test/java/io/opentelemetry/api/logs/DefaultLoggerTest.java index 9f43ab22b87..10b43897a3e 100644 --- a/api/all/src/test/java/io/opentelemetry/api/logs/DefaultLoggerTest.java +++ b/api/all/src/test/java/io/opentelemetry/api/logs/DefaultLoggerTest.java @@ -5,34 +5,17 @@ package io.opentelemetry.api.logs; -import static org.assertj.core.api.Assertions.assertThatCode; +import io.opentelemetry.api.testing.internal.AbstractDefaultLoggerTest; -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.context.Context; -import java.time.Instant; -import java.util.concurrent.TimeUnit; -import org.junit.jupiter.api.Test; +class DefaultLoggerTest extends AbstractDefaultLoggerTest { -class DefaultLoggerTest { + @Override + protected LoggerProvider getLoggerProvider() { + return DefaultLoggerProvider.getInstance(); + } - @Test - void buildAndEmit() { - assertThatCode( - () -> - DefaultLogger.getInstance() - .logRecordBuilder() - .setTimestamp(100, TimeUnit.SECONDS) - .setTimestamp(Instant.now()) - .setObservedTimestamp(100, TimeUnit.SECONDS) - .setObservedTimestamp(Instant.now()) - .setContext(Context.root()) - .setSeverity(Severity.DEBUG) - .setSeverityText("debug") - .setBody("body") - .setAttribute(AttributeKey.stringKey("key1"), "value1") - .setAllAttributes(Attributes.builder().put("key2", "value2").build()) - .emit()) - .doesNotThrowAnyException(); + @Override + protected Logger getLogger() { + return DefaultLogger.getInstance(); } } diff --git a/api/all/src/test/java/io/opentelemetry/api/logs/ValueTest.java b/api/all/src/test/java/io/opentelemetry/api/logs/ValueTest.java new file mode 100644 index 00000000000..ae83e0dd44c --- /dev/null +++ b/api/all/src/test/java/io/opentelemetry/api/logs/ValueTest.java @@ -0,0 +1,215 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.logs; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static 
org.junit.jupiter.params.provider.Arguments.arguments; + +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.common.ValueType; +import java.nio.ByteBuffer; +import java.nio.ReadOnlyBufferException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Base64; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.stream.Stream; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class ValueTest { + + @Test + void value_OfString() { + assertThat(Value.of("foo")) + .satisfies( + value -> { + assertThat(value.getType()).isEqualTo(ValueType.STRING); + assertThat(value.getValue()).isEqualTo("foo"); + assertThat(value).hasSameHashCodeAs(Value.of("foo")); + }); + } + + @Test + void value_OfBoolean() { + assertThat(Value.of(true)) + .satisfies( + value -> { + assertThat(value.getType()).isEqualTo(ValueType.BOOLEAN); + assertThat(value.getValue()).isEqualTo(true); + assertThat(value).hasSameHashCodeAs(Value.of(true)); + }); + } + + @Test + void value_OfLong() { + assertThat(Value.of(1L)) + .satisfies( + value -> { + assertThat(value.getType()).isEqualTo(ValueType.LONG); + assertThat(value.getValue()).isEqualTo(1L); + assertThat(value).hasSameHashCodeAs(Value.of(1L)); + }); + } + + @Test + void value_OfDouble() { + assertThat(Value.of(1.1)) + .satisfies( + value -> { + assertThat(value.getType()).isEqualTo(ValueType.DOUBLE); + assertThat(value.getValue()).isEqualTo(1.1); + assertThat(value).hasSameHashCodeAs(Value.of(1.1)); + }); + } + + @Test + void value_OfByteArray() { + assertThat(Value.of(new byte[] {'a', 'b'})) + .satisfies( + value -> { + assertThat(value.getType()).isEqualTo(ValueType.BYTES); + ByteBuffer buf = value.getValue(); + // ValueBytes returns read only view of ByteBuffer + assertThatThrownBy(buf::array).isInstanceOf(ReadOnlyBufferException.class); + byte[] bytes = new byte[buf.remaining()]; + buf.get(bytes); + assertThat(bytes).isEqualTo(new byte[] {'a', 'b'}); + assertThat(value).hasSameHashCodeAs(Value.of(new byte[] {'a', 'b'})); + }); + } + + @Test + void value_OfvalueArray() { + assertThat(Value.of(Value.of(true), Value.of(1L))) + .satisfies( + value -> { + assertThat(value.getType()).isEqualTo(ValueType.ARRAY); + assertThat(value.getValue()).isEqualTo(Arrays.asList(Value.of(true), Value.of(1L))); + assertThat(value).hasSameHashCodeAs(Value.of(Value.of(true), Value.of(1L))); + }); + } + + @Test + @SuppressWarnings("DoubleBraceInitialization") + void value_OfKeyValueList() { + assertThat(Value.of(KeyValue.of("bool", Value.of(true)), KeyValue.of("long", Value.of(1L)))) + .satisfies( + value -> { + assertThat(value.getType()).isEqualTo(ValueType.KEY_VALUE_LIST); + assertThat(value.getValue()) + .isEqualTo( + Arrays.asList( + KeyValue.of("bool", Value.of(true)), KeyValue.of("long", Value.of(1L)))); + assertThat(value) + .hasSameHashCodeAs( + Value.of( + KeyValue.of("bool", Value.of(true)), KeyValue.of("long", Value.of(1L)))); + }); + + assertThat( + Value.of( + new LinkedHashMap>() { + { + put("bool", Value.of(true)); + put("long", Value.of(1L)); + } + })) + .satisfies( + value -> { + assertThat(value.getType()).isEqualTo(ValueType.KEY_VALUE_LIST); + assertThat(value.getValue()) + .isEqualTo( + Arrays.asList( + KeyValue.of("bool", Value.of(true)), KeyValue.of("long", Value.of(1L)))); + 
assertThat(value) + .hasSameHashCodeAs( + Value.of( + new LinkedHashMap>() { + { + put("bool", Value.of(true)); + put("long", Value.of(1L)); + } + })); + }); + } + + @Test + void value_NullsNotAllowed() { + assertThatThrownBy(() -> Value.of((String) null)) + .isInstanceOf(NullPointerException.class) + .hasMessageContaining("value must not be null"); + assertThatThrownBy(() -> Value.of((byte[]) null)) + .isInstanceOf(NullPointerException.class) + .hasMessageContaining("value must not be null"); + assertThatThrownBy(() -> Value.of((Value[]) null)) + .isInstanceOf(NullPointerException.class) + .hasMessageContaining("value must not be null"); + assertThatThrownBy(() -> Value.of((KeyValue[]) null)) + .isInstanceOf(NullPointerException.class) + .hasMessageContaining("value must not be null"); + assertThatThrownBy(() -> Value.of((Map>) null)) + .isInstanceOf(NullPointerException.class) + .hasMessageContaining("value must not be null"); + } + + @ParameterizedTest + @MethodSource("asStringArgs") + void asString(Value value, String expectedAsString) { + assertThat(value.asString()).isEqualTo(expectedAsString); + } + + @SuppressWarnings("DoubleBraceInitialization") + private static Stream asStringArgs() { + return Stream.of( + // primitives + arguments(Value.of("str"), "str"), + arguments(Value.of(true), "true"), + arguments(Value.of(1), "1"), + arguments(Value.of(1.1), "1.1"), + // heterogeneous array + arguments( + Value.of(Value.of("str"), Value.of(true), Value.of(1), Value.of(1.1)), + "[str, true, 1, 1.1]"), + // key value list from KeyValue array + arguments( + Value.of(KeyValue.of("key1", Value.of("val1")), KeyValue.of("key2", Value.of(2))), + "[key1=val1, key2=2]"), + // key value list from map + arguments( + Value.of( + new LinkedHashMap>() { + { + put("key1", Value.of("val1")); + put("key2", Value.of(2)); + } + }), + "[key1=val1, key2=2]"), + // map of map + arguments( + Value.of( + Collections.singletonMap( + "child", Value.of(Collections.singletonMap("grandchild", Value.of("str"))))), + "[child=[grandchild=str]]"), + // bytes + arguments(Value.of("hello world".getBytes(StandardCharsets.UTF_8)), "aGVsbG8gd29ybGQ=")); + } + + @Test + void valueByteAsString() { + // TODO: add more test cases + String str = "hello world"; + String base64Encoded = Value.of(str.getBytes(StandardCharsets.UTF_8)).asString(); + byte[] decodedBytes = Base64.getDecoder().decode(base64Encoded); + assertThat(new String(decodedBytes, StandardCharsets.UTF_8)).isEqualTo(str); + } +} diff --git a/api/all/src/test/java/io/opentelemetry/api/metrics/DefaultMeterProviderTest.java b/api/all/src/test/java/io/opentelemetry/api/metrics/DefaultMeterProviderTest.java deleted file mode 100644 index 786d0a68a6e..00000000000 --- a/api/all/src/test/java/io/opentelemetry/api/metrics/DefaultMeterProviderTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.metrics; - -import org.junit.jupiter.api.Test; - -public class DefaultMeterProviderTest { - @Test - void noopMeterProvider_getDoesNotThrow() { - MeterProvider provider = MeterProvider.noop(); - provider.get("user-instrumentation"); - } - - @Test - void noopMeterProvider_builderDoesNotThrow() { - MeterProvider provider = MeterProvider.noop(); - provider.meterBuilder("user-instrumentation").build(); - provider.meterBuilder("advanced-instrumetnation").setInstrumentationVersion("1.0").build(); - provider.meterBuilder("schema-instrumentation").setSchemaUrl("myschema://url").build(); - 
provider - .meterBuilder("schema-instrumentation") - .setInstrumentationVersion("1.0") - .setSchemaUrl("myschema://url") - .build(); - } -} diff --git a/api/all/src/test/java/io/opentelemetry/api/metrics/DefaultMeterTest.java b/api/all/src/test/java/io/opentelemetry/api/metrics/DefaultMeterTest.java index a42b194f81a..69b32d5ecc0 100644 --- a/api/all/src/test/java/io/opentelemetry/api/metrics/DefaultMeterTest.java +++ b/api/all/src/test/java/io/opentelemetry/api/metrics/DefaultMeterTest.java @@ -5,172 +5,17 @@ package io.opentelemetry.api.metrics; -import static io.opentelemetry.api.common.AttributeKey.stringKey; +import io.opentelemetry.api.testing.internal.AbstractDefaultMeterTest; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.context.Context; -import io.opentelemetry.internal.testing.slf4j.SuppressLogger; -import org.junit.jupiter.api.Test; +public class DefaultMeterTest extends AbstractDefaultMeterTest { -@SuppressLogger() -public class DefaultMeterTest { - private static final Meter METER = DefaultMeter.getInstance(); - - @Test - void noopLongCounter_doesNotThrow() { - LongCounter counter = - METER.counterBuilder("size").setDescription("The size I'm measuring").setUnit("1").build(); - counter.add(1); - counter.add(1, Attributes.of(stringKey("thing"), "car")); - counter.add(1, Attributes.of(stringKey("thing"), "car"), Context.current()); - } - - @Test - void noopDoubleCounter_doesNotThrow() { - DoubleCounter counter = - METER - .counterBuilder("size") - .ofDoubles() - .setDescription("The size I'm measuring") - .setUnit("1") - .build(); - counter.add(1.2); - counter.add(2.5, Attributes.of(stringKey("thing"), "car")); - counter.add(2.5, Attributes.of(stringKey("thing"), "car"), Context.current()); - } - - @Test - void noopLongUpDownCounter_doesNotThrow() { - LongUpDownCounter counter = - METER - .upDownCounterBuilder("size") - .setDescription("The size I'm measuring") - .setUnit("1") - .build(); - counter.add(-1); - counter.add(1, Attributes.of(stringKey("thing"), "car")); - counter.add(1, Attributes.of(stringKey("thing"), "car"), Context.current()); - } - - @Test - void noopDoubleUpDownCounter_doesNotThrow() { - DoubleUpDownCounter counter = - METER - .upDownCounterBuilder("size") - .ofDoubles() - .setDescription("The size I'm measuring") - .setUnit("1") - .build(); - counter.add(-2e4); - counter.add(1.0e-1, Attributes.of(stringKey("thing"), "car")); - counter.add(1.0e-1, Attributes.of(stringKey("thing"), "car"), Context.current()); - } - - @Test - void noopLongHistogram_doesNotThrow() { - LongHistogram histogram = - METER - .histogramBuilder("size") - .ofLongs() - .setDescription("The size I'm measuring") - .setUnit("1") - .build(); - histogram.record(-1); - histogram.record(1, Attributes.of(stringKey("thing"), "car")); - histogram.record(1, Attributes.of(stringKey("thing"), "car"), Context.current()); - } - - @Test - void noopDoubleHistogram_doesNotThrow() { - DoubleHistogram histogram = - METER - .histogramBuilder("size") - .setDescription("The size I'm measuring") - .setUnit("1") - .build(); - histogram.record(-2e4); - histogram.record(1.0e-1, Attributes.of(stringKey("thing"), "car")); - histogram.record(1.0e-1, Attributes.of(stringKey("thing"), "car"), Context.current()); - } - - @Test - void noopObservableLongGauage_doesNotThrow() { - METER - .gaugeBuilder("temperature") - .ofLongs() - .setDescription("The current temperature") - .setUnit("C") - .buildWithCallback( - m -> { - m.record(1); - m.record(2, Attributes.of(stringKey("thing"), "engine")); - }); 
- } - - @Test - void noopObservableDoubleGauage_doesNotThrow() { - METER - .gaugeBuilder("temperature") - .setDescription("The current temperature") - .setUnit("C") - .buildWithCallback( - m -> { - m.record(1.0e1); - m.record(-27.4, Attributes.of(stringKey("thing"), "engine")); - }); - } - - @Test - void noopObservableLongCounter_doesNotThrow() { - METER - .counterBuilder("temperature") - .setDescription("The current temperature") - .setUnit("C") - .buildWithCallback( - m -> { - m.record(1); - m.record(2, Attributes.of(stringKey("thing"), "engine")); - }); - } - - @Test - void noopObservableDoubleCounter_doesNotThrow() { - METER - .counterBuilder("temperature") - .ofDoubles() - .setDescription("The current temperature") - .setUnit("C") - .buildWithCallback( - m -> { - m.record(1.0e1); - m.record(-27.4, Attributes.of(stringKey("thing"), "engine")); - }); - } - - @Test - void noopObservableLongUpDownCounter_doesNotThrow() { - METER - .upDownCounterBuilder("temperature") - .setDescription("The current temperature") - .setUnit("C") - .buildWithCallback( - m -> { - m.record(1); - m.record(2, Attributes.of(stringKey("thing"), "engine")); - }); + @Override + protected Meter getMeter() { + return DefaultMeter.getInstance(); } - @Test - void noopObservableDoubleUpDownCounter_doesNotThrow() { - METER - .upDownCounterBuilder("temperature") - .ofDoubles() - .setDescription("The current temperature") - .setUnit("C") - .buildWithCallback( - m -> { - m.record(1.0e1); - m.record(-27.4, Attributes.of(stringKey("thing"), "engine")); - }); + @Override + protected MeterProvider getMeterProvider() { + return DefaultMeterProvider.getInstance(); } } diff --git a/api/all/src/test/java/io/opentelemetry/api/trace/DefaultTracerProviderTest.java b/api/all/src/test/java/io/opentelemetry/api/trace/DefaultTracerProviderTest.java deleted file mode 100644 index 72a98f29f59..00000000000 --- a/api/all/src/test/java/io/opentelemetry/api/trace/DefaultTracerProviderTest.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.trace; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.Test; - -class DefaultTracerProviderTest { - - @Test - void returnsDefaultTracer() { - assertThat(TracerProvider.noop().get("test")).isInstanceOf(DefaultTracer.class); - assertThat(TracerProvider.noop().get("test", "1.0")).isInstanceOf(DefaultTracer.class); - } -} diff --git a/api/all/src/test/java/io/opentelemetry/api/trace/DefaultTracerTest.java b/api/all/src/test/java/io/opentelemetry/api/trace/DefaultTracerTest.java index e1379592062..b6736fa3843 100644 --- a/api/all/src/test/java/io/opentelemetry/api/trace/DefaultTracerTest.java +++ b/api/all/src/test/java/io/opentelemetry/api/trace/DefaultTracerTest.java @@ -5,77 +5,17 @@ package io.opentelemetry.api.trace; -import static org.assertj.core.api.Assertions.assertThat; +import io.opentelemetry.api.testing.internal.AbstractDefaultTracerTest; -import io.opentelemetry.context.Context; -import org.junit.jupiter.api.Test; +class DefaultTracerTest extends AbstractDefaultTracerTest { -/** Unit tests for {@link DefaultTracer}. */ -// Need to suppress warnings for MustBeClosed because Android 14 does not support -// try-with-resources. 
-@SuppressWarnings("MustBeClosedChecker") -class DefaultTracerTest { - private static final Tracer defaultTracer = DefaultTracer.getInstance(); - private static final String SPAN_NAME = "MySpanName"; - private static final SpanContext spanContext = - SpanContext.create( - "00000000000000000000000000000061", - "0000000000000061", - TraceFlags.getDefault(), - TraceState.getDefault()); - - @Test - void defaultSpanBuilderWithName() { - assertThat(defaultTracer.spanBuilder(SPAN_NAME).startSpan().getSpanContext().isValid()) - .isFalse(); - } - - @Test - void testSpanContextPropagationExplicitParent() { - Span span = - defaultTracer - .spanBuilder(SPAN_NAME) - .setParent(Context.root().with(Span.wrap(spanContext))) - .startSpan(); - assertThat(span.getSpanContext()).isSameAs(spanContext); - } - - @Test - void testSpanContextPropagation() { - Span parent = Span.wrap(spanContext); - - Span span = - defaultTracer.spanBuilder(SPAN_NAME).setParent(Context.root().with(parent)).startSpan(); - assertThat(span.getSpanContext()).isSameAs(spanContext); - } - - @Test - void noSpanContextMakesInvalidSpans() { - Span span = defaultTracer.spanBuilder(SPAN_NAME).startSpan(); - assertThat(span.getSpanContext()).isSameAs(SpanContext.getInvalid()); + @Override + public Tracer getTracer() { + return DefaultTracer.getInstance(); } - @Test - void testSpanContextPropagation_fromContext() { - Context context = Context.current().with(Span.wrap(spanContext)); - - Span span = defaultTracer.spanBuilder(SPAN_NAME).setParent(context).startSpan(); - assertThat(span.getSpanContext()).isSameAs(spanContext); - } - - @Test - void testSpanContextPropagation_fromContextAfterNoParent() { - Context context = Context.current().with(Span.wrap(spanContext)); - - Span span = defaultTracer.spanBuilder(SPAN_NAME).setNoParent().setParent(context).startSpan(); - assertThat(span.getSpanContext()).isSameAs(spanContext); - } - - @Test - void testSpanContextPropagation_fromContextThenNoParent() { - Context context = Context.current().with(Span.wrap(spanContext)); - - Span span = defaultTracer.spanBuilder(SPAN_NAME).setParent(context).setNoParent().startSpan(); - assertThat(span.getSpanContext()).isEqualTo(SpanContext.getInvalid()); + @Override + public TracerProvider getTracerProvider() { + return DefaultTracerProvider.getInstance(); } } diff --git a/api/all/src/test/java/io/opentelemetry/api/trace/SpanBuilderTest.java b/api/all/src/test/java/io/opentelemetry/api/trace/SpanBuilderTest.java deleted file mode 100644 index a7ed2dc27cb..00000000000 --- a/api/all/src/test/java/io/opentelemetry/api/trace/SpanBuilderTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.trace; - -import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatCode; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.context.Context; -import java.time.Instant; -import java.util.concurrent.TimeUnit; -import org.junit.jupiter.api.Test; - -/** Unit tests for {@link SpanBuilder}. 
*/ -class SpanBuilderTest { - private final Tracer tracer = DefaultTracer.getInstance(); - - @Test - void doNotCrash_NoopImplementation() { - assertThatCode( - () -> { - SpanBuilder spanBuilder = tracer.spanBuilder(null); - spanBuilder.setSpanKind(null); - spanBuilder.setParent(null); - spanBuilder.setNoParent(); - spanBuilder.addLink(null); - spanBuilder.addLink(null, Attributes.empty()); - spanBuilder.addLink(SpanContext.getInvalid(), null); - spanBuilder.setAttribute((String) null, "foo"); - spanBuilder.setAttribute("foo", null); - spanBuilder.setAttribute(null, 0L); - spanBuilder.setAttribute(null, 0.0); - spanBuilder.setAttribute(null, false); - spanBuilder.setAttribute((AttributeKey) null, "foo"); - spanBuilder.setAttribute(stringKey(null), "foo"); - spanBuilder.setAttribute(stringKey(""), "foo"); - spanBuilder.setAttribute(stringKey("foo"), null); - spanBuilder.setStartTimestamp(-1, TimeUnit.MILLISECONDS); - spanBuilder.setStartTimestamp(1, null); - spanBuilder.setParent(Context.root().with(Span.wrap(null))); - spanBuilder.setParent(Context.root()); - spanBuilder.setNoParent(); - spanBuilder.addLink(Span.getInvalid().getSpanContext()); - spanBuilder.addLink(Span.getInvalid().getSpanContext(), Attributes.empty()); - spanBuilder.setAttribute("key", "value"); - spanBuilder.setAttribute("key", 12345L); - spanBuilder.setAttribute("key", .12345); - spanBuilder.setAttribute("key", true); - spanBuilder.setAttribute(stringKey("key"), "value"); - spanBuilder.setAllAttributes(Attributes.of(stringKey("key"), "value")); - spanBuilder.setAllAttributes(Attributes.empty()); - spanBuilder.setAllAttributes(null); - spanBuilder.setStartTimestamp(12345L, TimeUnit.NANOSECONDS); - spanBuilder.setStartTimestamp(Instant.EPOCH); - spanBuilder.setStartTimestamp(null); - assertThat(spanBuilder.startSpan().getSpanContext().isValid()).isFalse(); - }) - .doesNotThrowAnyException(); - } -} diff --git a/api/events/build.gradle.kts b/api/events/build.gradle.kts deleted file mode 100644 index 9a0262cded0..00000000000 --- a/api/events/build.gradle.kts +++ /dev/null @@ -1,13 +0,0 @@ -plugins { - id("otel.java-conventions") - id("otel.publish-conventions") - - id("otel.animalsniffer-conventions") -} - -description = "OpenTelemetry Events API" -otelJava.moduleName.set("io.opentelemetry.api.events") - -dependencies { - api(project(":api:all")) -} diff --git a/api/events/gradle.properties b/api/events/gradle.properties deleted file mode 100644 index bbcbb896228..00000000000 --- a/api/events/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -otel.release=alpha \ No newline at end of file diff --git a/api/events/src/main/java/io/opentelemetry/api/events/DefaultEventEmitter.java b/api/events/src/main/java/io/opentelemetry/api/events/DefaultEventEmitter.java deleted file mode 100644 index 2be4164d010..00000000000 --- a/api/events/src/main/java/io/opentelemetry/api/events/DefaultEventEmitter.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.events; - -import io.opentelemetry.api.common.Attributes; - -class DefaultEventEmitter implements EventEmitter { - - private static final EventEmitter INSTANCE = new DefaultEventEmitter(); - - private DefaultEventEmitter() {} - - static EventEmitter getInstance() { - return INSTANCE; - } - - @Override - public void emit(String eventName, Attributes attributes) {} -} diff --git a/api/events/src/main/java/io/opentelemetry/api/events/DefaultEventEmitterProvider.java 
b/api/events/src/main/java/io/opentelemetry/api/events/DefaultEventEmitterProvider.java deleted file mode 100644 index 2f5b69826fd..00000000000 --- a/api/events/src/main/java/io/opentelemetry/api/events/DefaultEventEmitterProvider.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.events; - -class DefaultEventEmitterProvider implements EventEmitterProvider { - - private static final EventEmitterProvider INSTANCE = new DefaultEventEmitterProvider(); - private static final EventEmitterBuilder NOOP_EVENT_EMITTER_BUILDER = - new NoopEventEmitterBuilder(); - - private DefaultEventEmitterProvider() {} - - static EventEmitterProvider getInstance() { - return INSTANCE; - } - - @Override - public EventEmitterBuilder eventEmitterBuilder(String instrumentationScopeName) { - return NOOP_EVENT_EMITTER_BUILDER; - } - - private static class NoopEventEmitterBuilder implements EventEmitterBuilder { - - @Override - public EventEmitterBuilder setSchemaUrl(String schemaUrl) { - return this; - } - - @Override - public EventEmitterBuilder setInstrumentationVersion(String instrumentationVersion) { - return this; - } - - @Override - public EventEmitterBuilder setEventDomain(String eventDomain) { - return this; - } - - @Override - public EventEmitter build() { - return DefaultEventEmitter.getInstance(); - } - } -} diff --git a/api/events/src/main/java/io/opentelemetry/api/events/EventEmitter.java b/api/events/src/main/java/io/opentelemetry/api/events/EventEmitter.java deleted file mode 100644 index 69df8407f43..00000000000 --- a/api/events/src/main/java/io/opentelemetry/api/events/EventEmitter.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.events; - -import io.opentelemetry.api.common.Attributes; -import javax.annotation.concurrent.ThreadSafe; - -/** - * A {@link EventEmitter} is the entry point into an event pipeline. - * - *

<p>Example usage emitting events:
- *
- * <pre>{@code
- * class MyClass {
- *   private final EventEmitter eventEmitter = openTelemetryEventEmitterProvider.eventEmitterBuilder("scope-name")
- *         .setEventDomain("acme.observability")
- *         .build();
- *
- *   void doWork() {
- *     eventEmitter.emit("my-event", Attributes.builder()
- *         .put("key1", "value1")
- *         .put("key2", "value2")
- *         .build())
- *     // do work
- *   }
- * }
- * }</pre>
- */ -@ThreadSafe -public interface EventEmitter { - - /** - * Emit an event. - * - * @param eventName the event name, which acts as a classifier for events. Within a particular - * event domain, event name defines a particular class or type of event. - * @param attributes attributes associated with the event - */ - void emit(String eventName, Attributes attributes); -} diff --git a/api/events/src/main/java/io/opentelemetry/api/events/EventEmitterBuilder.java b/api/events/src/main/java/io/opentelemetry/api/events/EventEmitterBuilder.java deleted file mode 100644 index 0aa67a25b08..00000000000 --- a/api/events/src/main/java/io/opentelemetry/api/events/EventEmitterBuilder.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.events; - -/** - * Builder class for creating {@link EventEmitter} instances. - * - *

{@link EventEmitter}s are identified by their scope name, version, and schema URL. These - * identifying fields, along with attributes, combine to form the instrumentation scope, which is - * attached to all events produced by the {@link EventEmitter}. - */ -public interface EventEmitterBuilder { - - /** - * Sets the event domain. Event domain is not part of {@link EventEmitter} identity. - * - * @param eventDomain The event domain, which acts as a namespace for event names. Within a - * particular event domain, event name defines a particular class or type of event. - * @return this - */ - EventEmitterBuilder setEventDomain(String eventDomain); - - /** - * Set the scope schema URL of the resulting {@link EventEmitter}. Schema URL is part of {@link - * EventEmitter} identity. - * - * @param schemaUrl The schema URL. - * @return this - */ - EventEmitterBuilder setSchemaUrl(String schemaUrl); - - /** - * Sets the instrumentation scope version of the resulting {@link EventEmitter}. Version is part - * of {@link EventEmitter} identity. - * - * @param instrumentationScopeVersion The instrumentation scope version. - * @return this - */ - EventEmitterBuilder setInstrumentationVersion(String instrumentationScopeVersion); - - /** - * Gets or creates a {@link EventEmitter} instance. - * - * @return a {@link EventEmitter} instance configured with the provided options. - */ - EventEmitter build(); -} diff --git a/api/events/src/main/java/io/opentelemetry/api/events/EventEmitterProvider.java b/api/events/src/main/java/io/opentelemetry/api/events/EventEmitterProvider.java deleted file mode 100644 index 521154765fa..00000000000 --- a/api/events/src/main/java/io/opentelemetry/api/events/EventEmitterProvider.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.events; - -import javax.annotation.concurrent.ThreadSafe; - -/** - * A registry for creating scoped {@link EventEmitter}s. The name Provider is for consistency - * with other languages and it is NOT loaded using reflection. - * - * @see EventEmitter - */ -@ThreadSafe -public interface EventEmitterProvider { - - /** - * Gets or creates a named EventEmitter instance which emits events to the {@code eventDomain}. - * - * @param instrumentationScopeName A name uniquely identifying the instrumentation scope, such as - * the instrumentation library, package, or fully qualified class name. Must not be null. - * @return a Logger instance. - */ - default EventEmitter get(String instrumentationScopeName) { - return eventEmitterBuilder(instrumentationScopeName).build(); - } - - /** - * Creates a LoggerBuilder for a named EventEmitter instance. - * - * @param instrumentationScopeName A name uniquely identifying the instrumentation scope, such as - * the instrumentation library, package, or fully qualified class name. Must not be null. - * @return a LoggerBuilder instance. - */ - EventEmitterBuilder eventEmitterBuilder(String instrumentationScopeName); - - /** - * Returns a no-op {@link EventEmitterProvider} which provides Loggers which do not record or - * emit. 
- */ - static EventEmitterProvider noop() { - return DefaultEventEmitterProvider.getInstance(); - } -} diff --git a/api/events/src/main/java/io/opentelemetry/api/events/GlobalEventEmitterProvider.java b/api/events/src/main/java/io/opentelemetry/api/events/GlobalEventEmitterProvider.java deleted file mode 100644 index ddf62a6d5a0..00000000000 --- a/api/events/src/main/java/io/opentelemetry/api/events/GlobalEventEmitterProvider.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.events; - -import io.opentelemetry.api.GlobalOpenTelemetry; -import java.util.concurrent.atomic.AtomicReference; -import javax.annotation.Nullable; - -/** - * This class provides a temporary global accessor for {@link EventEmitterProvider} until the event - * API is marked stable. It will eventually be merged into {@link GlobalOpenTelemetry}. - */ -// We intentionally assign to be used for error reporting. -@SuppressWarnings("StaticAssignmentOfThrowable") -public final class GlobalEventEmitterProvider { - - private static final AtomicReference instance = - new AtomicReference<>(EventEmitterProvider.noop()); - - @Nullable private static volatile Throwable setInstanceCaller; - - private GlobalEventEmitterProvider() {} - - /** Returns the globally registered {@link EventEmitterProvider}. */ - // instance cannot be set to null - @SuppressWarnings("NullAway") - public static EventEmitterProvider get() { - return instance.get(); - } - - /** - * Sets the global {@link EventEmitterProvider}. Future calls to {@link #get()} will return the - * provided {@link EventEmitterProvider} instance. This should be called once as early as possible - * in your application initialization logic. - */ - public static void set(EventEmitterProvider eventEmitterProvider) { - boolean changed = instance.compareAndSet(EventEmitterProvider.noop(), eventEmitterProvider); - if (!changed && (eventEmitterProvider != EventEmitterProvider.noop())) { - throw new IllegalStateException( - "GlobalEventEmitterProvider.set has already been called. GlobalEventEmitterProvider.set " - + "must be called only once before any calls to GlobalEventEmitterProvider.get. " - + "Previous invocation set to cause of this exception.", - setInstanceCaller); - } - setInstanceCaller = new Throwable(); - } - - /** - * Unsets the global {@link EventEmitterProvider}. This is only meant to be used from tests which - * need to reconfigure {@link EventEmitterProvider}. 
- */ - public static void resetForTest() { - instance.set(EventEmitterProvider.noop()); - } -} diff --git a/api/events/src/test/java/io/opentelemetry/api/events/DefaultEventEmitterProviderTest.java b/api/events/src/test/java/io/opentelemetry/api/events/DefaultEventEmitterProviderTest.java deleted file mode 100644 index 33651e89c6e..00000000000 --- a/api/events/src/test/java/io/opentelemetry/api/events/DefaultEventEmitterProviderTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.events; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatCode; - -import io.opentelemetry.api.common.Attributes; -import org.junit.jupiter.api.Test; - -class DefaultEventEmitterProviderTest { - - @Test - void noopEventEmitterProvider_doesNotThrow() { - EventEmitterProvider provider = EventEmitterProvider.noop(); - - assertThat(provider).isSameAs(DefaultEventEmitterProvider.getInstance()); - assertThatCode(() -> provider.get("scope-name")).doesNotThrowAnyException(); - assertThatCode( - () -> - provider - .eventEmitterBuilder("scope-name") - .setEventDomain("event-domain") - .setInstrumentationVersion("1.0") - .setSchemaUrl("http://schema.com") - .build()) - .doesNotThrowAnyException(); - - assertThatCode( - () -> - provider - .eventEmitterBuilder("scope-name") - .build() - .emit("event-name", Attributes.empty())) - .doesNotThrowAnyException(); - } -} diff --git a/api/events/src/test/java/io/opentelemetry/api/events/DefaultEventEmitterTest.java b/api/events/src/test/java/io/opentelemetry/api/events/DefaultEventEmitterTest.java deleted file mode 100644 index fb6bcabfc91..00000000000 --- a/api/events/src/test/java/io/opentelemetry/api/events/DefaultEventEmitterTest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.events; - -import static org.assertj.core.api.Assertions.assertThatCode; - -import io.opentelemetry.api.common.Attributes; -import org.junit.jupiter.api.Test; - -class DefaultEventEmitterTest { - - @Test - void emit() { - assertThatCode(() -> DefaultEventEmitter.getInstance().emit("event-name", Attributes.empty())) - .doesNotThrowAnyException(); - assertThatCode( - () -> - DefaultEventEmitter.getInstance() - .emit("event-name", Attributes.builder().put("key1", "value1").build())) - .doesNotThrowAnyException(); - } -} diff --git a/api/events/src/test/java/io/opentelemetry/api/events/GlobalEventEmitterProviderTest.java b/api/events/src/test/java/io/opentelemetry/api/events/GlobalEventEmitterProviderTest.java deleted file mode 100644 index f05763ea2f9..00000000000 --- a/api/events/src/test/java/io/opentelemetry/api/events/GlobalEventEmitterProviderTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.api.events; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -class GlobalEventEmitterProviderTest { - - @BeforeAll - static void beforeClass() { - GlobalEventEmitterProvider.resetForTest(); - } - - @AfterEach - void after() { - GlobalEventEmitterProvider.resetForTest(); - } - - @Test - void setAndGet() { - 
assertThat(GlobalEventEmitterProvider.get()).isEqualTo(EventEmitterProvider.noop()); - EventEmitterProvider eventEmitterProvider = - instrumentationScopeName -> - EventEmitterProvider.noop().eventEmitterBuilder(instrumentationScopeName); - GlobalEventEmitterProvider.set(eventEmitterProvider); - assertThat(GlobalEventEmitterProvider.get()).isEqualTo(eventEmitterProvider); - } - - @Test - void setThenSet() { - GlobalEventEmitterProvider.set( - instrumentationScopeName -> - EventEmitterProvider.noop().eventEmitterBuilder(instrumentationScopeName)); - assertThatThrownBy( - () -> - GlobalEventEmitterProvider.set( - instrumentationScopeName -> - EventEmitterProvider.noop().eventEmitterBuilder(instrumentationScopeName))) - .isInstanceOf(IllegalStateException.class) - .hasMessageContaining("GlobalEventEmitterProvider.set has already been called") - .hasStackTraceContaining("setThenSet"); - } -} diff --git a/api/incubator/README.md b/api/incubator/README.md new file mode 100644 index 00000000000..e0b0ec7a080 --- /dev/null +++ b/api/incubator/README.md @@ -0,0 +1,37 @@ +# API Incubator + +Experimental APIs, including Event API, extended Log Bridge APIs, extended Metrics APIs, extended ContextPropagator APIs, and extended Trace APIs. + +## Extended Log Bridge API + +Features: + +* Check if logger is enabled before emitting logs to avoid unnecessary computation + +See [ExtendedLogsBridgeApiUsageTest](./src/test/java/io/opentelemetry/api/incubator/logs/ExtendedLogsBridgeApiUsageTest.java). + +## Extended Metrics APIs + +Features: + +* Attributes advice +* Check if instrument is enabled before recording measurements to avoid unnecessary computation + +See [ExtendedMetricsApiUsageTest](./src/test/java/io/opentelemetry/api/incubator/metrics/ExtendedMetricsApiUsageTest.java). + +## Extended ContextPropagator APIs + +Features: + +* Simplified injection / extraction of context + +See [ExtendedContextPropagatorsUsageTest](./src/test/java/io/opentelemetry/api/incubator/propagation/ExtendedContextPropagatorsUsageTest.java). + +## Extended Trace APIs + +Features: + +* Check if tracer is enabled before starting spans to avoid unnecessary computation +* Utility methods to reduce boilerplate using span API, including extracting context, and wrapping runnables / callables with spans + +See [ExtendedTraceApiUsageTest](./src/test/java/io/opentelemetry/api/incubator/trace/ExtendedTraceApiUsageTest.java). 
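As a quick illustration of the enabled-check pattern described above, the following sketch (an editorial example, not part of this change set) shows how a caller might guard log emission; it assumes the configured SDK hands back the incubator `ExtendedLogger` implementation, and `buildExpensivePayload()` is a hypothetical placeholder:

```java
// Illustrative sketch only (not part of this change): guard an expensive log record
// behind ExtendedLogger.isEnabled() so the payload is built only when a consumer exists.
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.incubator.logs.ExtendedLogger;
import io.opentelemetry.api.logs.Severity;

class EnabledCheckExample {

  void emitIfEnabled(OpenTelemetry openTelemetry) {
    // Assumes the configured SDK returns the incubator implementation, so this cast succeeds.
    ExtendedLogger logger = (ExtendedLogger) openTelemetry.getLogsBridge().get("my-scope");
    if (logger.isEnabled()) {
      logger
          .logRecordBuilder()
          .setSeverity(Severity.INFO)
          .setBody(buildExpensivePayload()) // skipped entirely when logging is disabled
          .emit();
    }
  }

  private String buildExpensivePayload() {
    return "payload"; // hypothetical stand-in for work worth avoiding when disabled
  }
}
```

The tracer and instrument checks listed above follow the same shape; the usage tests linked in each section remain the authoritative examples.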
diff --git a/api/incubator/build.gradle.kts b/api/incubator/build.gradle.kts new file mode 100644 index 00000000000..205b5504a0f --- /dev/null +++ b/api/incubator/build.gradle.kts @@ -0,0 +1,26 @@ +plugins { + id("otel.java-conventions") + id("otel.publish-conventions") + + id("otel.jmh-conventions") + id("otel.animalsniffer-conventions") +} + +description = "OpenTelemetry API Incubator" +otelJava.moduleName.set("io.opentelemetry.api.incubator") + +dependencies { + api(project(":api:all")) + + annotationProcessor("com.google.auto.value:auto-value") + + // To use parsed config file as input for InstrumentationConfigUtilTest + testImplementation(project(":sdk-extensions:incubator")) + + testImplementation(project(":sdk:testing")) + testImplementation(project(":api:testing-internal")) + + testImplementation("io.opentelemetry.semconv:opentelemetry-semconv-incubating") + + testImplementation("com.google.guava:guava") +} diff --git a/exporters/sender/jdk/gradle.properties b/api/incubator/gradle.properties similarity index 100% rename from exporters/sender/jdk/gradle.properties rename to api/incubator/gradle.properties diff --git a/extensions/incubator/src/jmh/java/io/opentelemetry/extension/incubator/PassThroughPropagatorBenchmark.java b/api/incubator/src/jmh/java/io/opentelemetry/extension/incubator/PassThroughPropagatorBenchmark.java similarity index 97% rename from extensions/incubator/src/jmh/java/io/opentelemetry/extension/incubator/PassThroughPropagatorBenchmark.java rename to api/incubator/src/jmh/java/io/opentelemetry/extension/incubator/PassThroughPropagatorBenchmark.java index 06b68e6d45c..8d340985ae3 100644 --- a/extensions/incubator/src/jmh/java/io/opentelemetry/extension/incubator/PassThroughPropagatorBenchmark.java +++ b/api/incubator/src/jmh/java/io/opentelemetry/extension/incubator/PassThroughPropagatorBenchmark.java @@ -5,6 +5,7 @@ package io.opentelemetry.extension.incubator; +import io.opentelemetry.api.incubator.propagation.PassThroughPropagator; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.TraceFlags; @@ -13,7 +14,6 @@ import io.opentelemetry.context.Context; import io.opentelemetry.context.propagation.TextMapGetter; import io.opentelemetry.context.propagation.TextMapPropagator; -import io.opentelemetry.extension.incubator.propagation.PassThroughPropagator; import java.util.Collections; import java.util.HashMap; import java.util.Map; diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/ConfigProvider.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/ConfigProvider.java new file mode 100644 index 00000000000..62cc044e653 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/ConfigProvider.java @@ -0,0 +1,37 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.config; + +import javax.annotation.Nullable; +import javax.annotation.concurrent.ThreadSafe; + +/** + * A registry for accessing declarative configuration. + * + *

The name Provider is for consistency with other languages and it is NOT loaded + * using reflection. + * + *

See {@link InstrumentationConfigUtil} for convenience methods for extracting config from + * {@link ConfigProvider}. + */ +@ThreadSafe +public interface ConfigProvider { + + /** + * Returns the {@link DeclarativeConfigProperties} corresponding to instrumentation + * config, or {@code null} if unavailable. + * + * @return the instrumentation {@link DeclarativeConfigProperties} + */ + @Nullable + DeclarativeConfigProperties getInstrumentationConfig(); + + /** Returns a no-op {@link ConfigProvider}. */ + static ConfigProvider noop() { + return () -> null; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/DeclarativeConfigException.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/DeclarativeConfigException.java new file mode 100644 index 00000000000..3ce49c6454d --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/DeclarativeConfigException.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.config; + +/** An exception that is thrown when errors occur with declarative configuration. */ +public final class DeclarativeConfigException extends RuntimeException { + + private static final long serialVersionUID = 3036584181551130522L; + + /** Create a new configuration exception with specified {@code message} and without a cause. */ + public DeclarativeConfigException(String message) { + super(message); + } + + /** Create a new configuration exception with specified {@code message} and {@code cause}. */ + public DeclarativeConfigException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/DeclarativeConfigProperties.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/DeclarativeConfigProperties.java new file mode 100644 index 00000000000..ef8e2343f22 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/DeclarativeConfigProperties.java @@ -0,0 +1,225 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.config; + +import static io.opentelemetry.api.internal.ConfigUtil.defaultIfNull; + +import java.util.List; +import java.util.Set; +import javax.annotation.Nullable; + +/** + * An interface for accessing declarative configuration data. + * + *

An instance of {@link DeclarativeConfigProperties} is equivalent to a YAML mapping node. It has accessors for + * reading scalar properties, {@link #getStructured(String)} for reading children which are + * themselves mappings, and {@link #getStructuredList(String)} for reading children which are + * sequences of mappings. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public interface DeclarativeConfigProperties { + + /** + * Return an empty {@link DeclarativeConfigProperties} instance. + * + *

Useful for walking the tree without checking for null. For example, to access a string key + * nested at .foo.bar.baz, call: {@code config.getStructured("foo", empty()).getStructured("bar", + * empty()).getString("baz")}. + */ + static DeclarativeConfigProperties empty() { + return EmptyDeclarativeConfigProperties.getInstance(); + } + + /** + * Returns a {@link String} configuration property. + * + * @return null if the property has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar string + */ + @Nullable + String getString(String name); + + /** + * Returns a {@link String} configuration property. + * + * @return a {@link String} configuration property or {@code defaultValue} if a property with + * {@code name} has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar string + */ + default String getString(String name, String defaultValue) { + return defaultIfNull(getString(name), defaultValue); + } + + /** + * Returns a {@link Boolean} configuration property. Implementations should use the same rules as + * {@link Boolean#parseBoolean(String)} for handling the values. + * + * @return null if the property has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar boolean + */ + @Nullable + Boolean getBoolean(String name); + + /** + * Returns a {@link Boolean} configuration property. + * + * @return a {@link Boolean} configuration property or {@code defaultValue} if a property with + * {@code name} has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar boolean + */ + default boolean getBoolean(String name, boolean defaultValue) { + return defaultIfNull(getBoolean(name), defaultValue); + } + + /** + * Returns a {@link Integer} configuration property. + * + *

If the underlying config property is {@link Long}, it is converted to {@link Integer} with + * {@link Long#intValue()} which may result in loss of precision. + * + * @return null if the property has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar integer + */ + @Nullable + Integer getInt(String name); + + /** + * Returns a {@link Integer} configuration property. + * + *

If the underlying config property is {@link Long}, it is converted to {@link Integer} with + * {@link Long#intValue()} which may result in loss of precision. + * + * @return a {@link Integer} configuration property or {@code defaultValue} if a property with + * {@code name} has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar integer + */ + default int getInt(String name, int defaultValue) { + return defaultIfNull(getInt(name), defaultValue); + } + + /** + * Returns a {@link Long} configuration property. + * + * @return null if the property has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar long + */ + @Nullable + Long getLong(String name); + + /** + * Returns a {@link Long} configuration property. + * + * @return a {@link Long} configuration property or {@code defaultValue} if a property with {@code + * name} has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar long + */ + default long getLong(String name, long defaultValue) { + return defaultIfNull(getLong(name), defaultValue); + } + + /** + * Returns a {@link Double} configuration property. + * + * @return null if the property has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar double + */ + @Nullable + Double getDouble(String name); + + /** + * Returns a {@link Double} configuration property. + * + * @return a {@link Double} configuration property or {@code defaultValue} if a property with + * {@code name} has not been configured + * @throws DeclarativeConfigException if the property is not a valid scalar double + */ + default double getDouble(String name, double defaultValue) { + return defaultIfNull(getDouble(name), defaultValue); + } + + /** + * Returns a {@link List} configuration property. Empty values and values which do not map to the + * {@code scalarType} will be removed. + * + * @param name the property name + * @param scalarType the scalar type, one of {@link String}, {@link Boolean}, {@link Long} or + * {@link Double} + * @return a {@link List} configuration property, or null if the property has not been configured + * @throws DeclarativeConfigException if the property is not a valid sequence of scalars, or if + * {@code scalarType} is not supported + */ + @Nullable + List getScalarList(String name, Class scalarType); + + /** + * Returns a {@link List} configuration property. Entries which are not strings are converted to + * their string representation. + * + * @param name the property name + * @param scalarType the scalar type, one of {@link String}, {@link Boolean}, {@link Long} or + * {@link Double} + * @return a {@link List} configuration property or {@code defaultValue} if a property with {@code + * name} has not been configured + * @throws DeclarativeConfigException if the property is not a valid sequence of scalars + */ + default List getScalarList(String name, Class scalarType, List defaultValue) { + return defaultIfNull(getScalarList(name, scalarType), defaultValue); + } + + /** + * Returns a {@link DeclarativeConfigProperties} configuration property. + * + * @return a map-valued configuration property, or {@code null} if {@code name} has not been + * configured + * @throws DeclarativeConfigException if the property is not a mapping + */ + @Nullable + DeclarativeConfigProperties getStructured(String name); + + /** + * Returns a list of {@link DeclarativeConfigProperties} configuration property. 
+ * + * @return a map-valued configuration property, or {@code defaultValue} if {@code name} has not + * been configured + * @throws DeclarativeConfigException if the property is not a mapping + */ + default DeclarativeConfigProperties getStructured( + String name, DeclarativeConfigProperties defaultValue) { + return defaultIfNull(getStructured(name), defaultValue); + } + + /** + * Returns a list of {@link DeclarativeConfigProperties} configuration property. + * + * @return a list of map-valued configuration property, or {@code null} if {@code name} has not + * been configured + * @throws DeclarativeConfigException if the property is not a sequence of mappings + */ + @Nullable + List getStructuredList(String name); + + /** + * Returns a list of {@link DeclarativeConfigProperties} configuration property. + * + * @return a list of map-valued configuration property, or {@code defaultValue} if {@code name} + * has not been configured + * @throws DeclarativeConfigException if the property is not a sequence of mappings + */ + default List getStructuredList( + String name, List defaultValue) { + return defaultIfNull(getStructuredList(name), defaultValue); + } + + /** + * Returns a set of all configuration property keys. + * + * @return the configuration property keys + */ + Set getPropertyKeys(); +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/EmptyDeclarativeConfigProperties.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/EmptyDeclarativeConfigProperties.java new file mode 100644 index 00000000000..77b8a265492 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/EmptyDeclarativeConfigProperties.java @@ -0,0 +1,77 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.config; + +import java.util.Collections; +import java.util.List; +import java.util.Set; +import javax.annotation.Nullable; + +/** Empty instance of {@link DeclarativeConfigProperties}. 
*/ +final class EmptyDeclarativeConfigProperties implements DeclarativeConfigProperties { + + private static final EmptyDeclarativeConfigProperties INSTANCE = + new EmptyDeclarativeConfigProperties(); + + private EmptyDeclarativeConfigProperties() {} + + static EmptyDeclarativeConfigProperties getInstance() { + return INSTANCE; + } + + @Nullable + @Override + public String getString(String name) { + return null; + } + + @Nullable + @Override + public Boolean getBoolean(String name) { + return null; + } + + @Nullable + @Override + public Integer getInt(String name) { + return null; + } + + @Nullable + @Override + public Long getLong(String name) { + return null; + } + + @Nullable + @Override + public Double getDouble(String name) { + return null; + } + + @Nullable + @Override + public List getScalarList(String name, Class scalarType) { + return null; + } + + @Nullable + @Override + public DeclarativeConfigProperties getStructured(String name) { + return null; + } + + @Nullable + @Override + public List getStructuredList(String name) { + return null; + } + + @Override + public Set getPropertyKeys() { + return Collections.emptySet(); + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/GlobalConfigProvider.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/GlobalConfigProvider.java new file mode 100644 index 00000000000..b0daef4968d --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/GlobalConfigProvider.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.config; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import java.util.concurrent.atomic.AtomicReference; +import javax.annotation.Nullable; + +/** + * This class provides a temporary global accessor for {@link ConfigProvider} until the + * instrumentation config API is marked stable. It will eventually be merged into {@link + * GlobalOpenTelemetry}. + */ +// We intentionally assign to be used for error reporting. +@SuppressWarnings("StaticAssignmentOfThrowable") +public final class GlobalConfigProvider { + + private static final AtomicReference instance = + new AtomicReference<>(ConfigProvider.noop()); + + @SuppressWarnings("NonFinalStaticField") + @Nullable + private static volatile Throwable setInstanceCaller; + + private GlobalConfigProvider() {} + + /** Returns the globally registered {@link ConfigProvider}. */ + // instance cannot be set to null + @SuppressWarnings("NullAway") + public static ConfigProvider get() { + return instance.get(); + } + + /** + * Sets the global {@link ConfigProvider}. Future calls to {@link #get()} will return the provided + * {@link ConfigProvider} instance. This should be called once as early as possible in your + * application initialization logic. + * + * @throws IllegalStateException when called more than once + */ + public static void set(ConfigProvider configProvider) { + boolean changed = instance.compareAndSet(ConfigProvider.noop(), configProvider); + if (!changed && (configProvider != ConfigProvider.noop())) { + throw new IllegalStateException( + "GlobalConfigProvider.set has already been called. GlobalConfigProvider.set " + + "must be called only once before any calls to GlobalConfigProvider.get. " + + "Previous invocation set to cause of this exception.", + setInstanceCaller); + } + setInstanceCaller = new Throwable(); + } + + /** + * Unsets the global {@link ConfigProvider}. 
This is only meant to be used from tests which need + * to reconfigure {@link ConfigProvider}. + */ + public static void resetForTest() { + instance.set(ConfigProvider.noop()); + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/InstrumentationConfigUtil.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/InstrumentationConfigUtil.java new file mode 100644 index 00000000000..e1e95e93153 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/config/InstrumentationConfigUtil.java @@ -0,0 +1,151 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.config; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import javax.annotation.Nullable; + +/** + * A collection of convenience methods to extract instrumentation config from {@link + * ConfigProvider#getInstrumentationConfig()}. + */ +public class InstrumentationConfigUtil { + + /** + * Return a map representation of the peer service map entries in {@code + * .instrumentation.general.peer.service_mapping}, or null if none is configured. + * + * @throws DeclarativeConfigException if an unexpected type is encountered accessing the property + */ + @Nullable + public static Map peerServiceMapping(ConfigProvider configProvider) { + List serviceMappingList = + getOrNull( + configProvider, + config -> config.getStructuredList("service_mapping"), + "general", + "peer"); + if (serviceMappingList == null) { + return null; + } + Map serviceMapping = new LinkedHashMap<>(); + serviceMappingList.forEach( + entry -> { + String peer = entry.getString("peer"); + String service = entry.getString("service"); + if (peer != null && service != null) { + serviceMapping.put(peer, service); + } + }); + return serviceMapping.isEmpty() ? null : serviceMapping; + } + + /** + * Return {@code .instrumentation.general.http.client.request_captured_headers}, or null if none + * is configured. + * + * @throws DeclarativeConfigException if an unexpected type is encountered accessing the property + */ + @Nullable + public static List httpClientRequestCapturedHeaders(ConfigProvider configProvider) { + return getOrNull( + configProvider, + config -> config.getScalarList("request_captured_headers", String.class), + "general", + "http", + "client"); + } + + /** + * Return {@code .instrumentation.general.http.client.response_captured_headers}, or null if none + * is configured. + * + * @throws DeclarativeConfigException if an unexpected type is encountered accessing the property + */ + @Nullable + public static List httpClientResponseCapturedHeaders(ConfigProvider configProvider) { + return getOrNull( + configProvider, + config -> config.getScalarList("response_captured_headers", String.class), + "general", + "http", + "client"); + } + + /** + * Return {@code .instrumentation.general.http.server.request_captured_headers}, or null if none + * is configured. + * + * @throws DeclarativeConfigException if an unexpected type is encountered accessing the property + */ + @Nullable + public static List httpServerRequestCapturedHeaders(ConfigProvider configProvider) { + return getOrNull( + configProvider, + config -> config.getScalarList("request_captured_headers", String.class), + "general", + "http", + "server"); + } + + /** + * Return {@code .instrumentation.general.http.server.response_captured_headers}, or null if none + * is configured. 
+ * + * @throws DeclarativeConfigException if an unexpected type is encountered accessing the property + */ + @Nullable + public static List httpSeverResponseCapturedHeaders(ConfigProvider configProvider) { + return getOrNull( + configProvider, + config -> config.getScalarList("response_captured_headers", String.class), + "general", + "http", + "server"); + } + + /** + * Return {@code .instrumentation.java.}, or null if none is configured. + * + * @throws DeclarativeConfigException if an unexpected type is encountered accessing the property + */ + @Nullable + public static DeclarativeConfigProperties javaInstrumentationConfig( + ConfigProvider configProvider, String instrumentationName) { + return getOrNull(configProvider, config -> config.getStructured(instrumentationName), "java"); + } + + /** + * Walk down the {@code segments} of {@link ConfigProvider#getInstrumentationConfig()} and call + * {@code accessor} on the terminal node. Returns null if {@link + * ConfigProvider#getInstrumentationConfig()} is null, or if null is encountered walking the + * {@code segments}, or if {@code accessor} returns null. + * + *

See other methods in {@link InstrumentationConfigUtil} for usage examples. + */ + @Nullable + public static T getOrNull( + ConfigProvider configProvider, + Function accessor, + String... segments) { + DeclarativeConfigProperties config = configProvider.getInstrumentationConfig(); + if (config == null) { + return null; + } + for (String segment : segments) { + config = config.getStructured(segment); + if (config == null) { + return null; + } + } + return accessor.apply(config); + } + + private InstrumentationConfigUtil() {} +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedDefaultLogger.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedDefaultLogger.java new file mode 100644 index 00000000000..378af1c6b00 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedDefaultLogger.java @@ -0,0 +1,96 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.logs; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.logs.LogRecordBuilder; +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.context.Context; +import java.time.Instant; +import java.util.concurrent.TimeUnit; + +class ExtendedDefaultLogger implements ExtendedLogger { + + private static final Logger INSTANCE = new ExtendedDefaultLogger(); + private static final ExtendedLogRecordBuilder NOOP_LOG_RECORD_BUILDER = + new NoopExtendedLogRecordBuilder(); + + private ExtendedDefaultLogger() {} + + static Logger getNoop() { + return INSTANCE; + } + + @Override + public ExtendedLogRecordBuilder logRecordBuilder() { + return NOOP_LOG_RECORD_BUILDER; + } + + private static final class NoopExtendedLogRecordBuilder implements ExtendedLogRecordBuilder { + + private NoopExtendedLogRecordBuilder() {} + + @Override + public ExtendedLogRecordBuilder setEventName(String eventName) { + return this; + } + + @Override + public LogRecordBuilder setTimestamp(long timestamp, TimeUnit unit) { + return this; + } + + @Override + public LogRecordBuilder setTimestamp(Instant instant) { + return this; + } + + @Override + public LogRecordBuilder setObservedTimestamp(long timestamp, TimeUnit unit) { + return this; + } + + @Override + public LogRecordBuilder setObservedTimestamp(Instant instant) { + return this; + } + + @Override + public LogRecordBuilder setContext(Context context) { + return this; + } + + @Override + public LogRecordBuilder setSeverity(Severity severity) { + return this; + } + + @Override + public LogRecordBuilder setSeverityText(String severityText) { + return this; + } + + @Override + public LogRecordBuilder setBody(String body) { + return this; + } + + @Override + public LogRecordBuilder setBody(Value body) { + return this; + } + + @Override + public LogRecordBuilder setAttribute(AttributeKey key, T value) { + return this; + } + + @Override + public void emit() {} + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedDefaultLoggerProvider.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedDefaultLoggerProvider.java new file mode 100644 index 00000000000..6cf93296689 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedDefaultLoggerProvider.java @@ -0,0 +1,45 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 
+ */ + +package io.opentelemetry.api.incubator.logs; + +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.api.logs.LoggerBuilder; +import io.opentelemetry.api.logs.LoggerProvider; + +public class ExtendedDefaultLoggerProvider implements LoggerProvider { + + private static final LoggerProvider INSTANCE = new ExtendedDefaultLoggerProvider(); + private static final LoggerBuilder NOOP_BUILDER = new NoopLoggerBuilder(); + + private ExtendedDefaultLoggerProvider() {} + + public static LoggerProvider getNoop() { + return INSTANCE; + } + + @Override + public LoggerBuilder loggerBuilder(String instrumentationScopeName) { + return NOOP_BUILDER; + } + + private static class NoopLoggerBuilder implements LoggerBuilder { + + @Override + public LoggerBuilder setSchemaUrl(String schemaUrl) { + return this; + } + + @Override + public LoggerBuilder setInstrumentationVersion(String instrumentationVersion) { + return this; + } + + @Override + public Logger build() { + return ExtendedDefaultLogger.getNoop(); + } + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedLogRecordBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedLogRecordBuilder.java new file mode 100644 index 00000000000..4a18baa07ed --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedLogRecordBuilder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.logs; + +import io.opentelemetry.api.logs.LogRecordBuilder; + +/** Extended {@link LogRecordBuilder} with experimental APIs. */ +public interface ExtendedLogRecordBuilder extends LogRecordBuilder { + + // keep this class even if it is empty, since experimental methods may be added in the future. + + /** + * Sets the event name, which identifies the class / type of the Event. + * + *

This name should uniquely identify the event structure (both attributes and body). A log + * record with a non-empty event name is an Event. + */ + ExtendedLogRecordBuilder setEventName(String eventName); +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedLogger.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedLogger.java new file mode 100644 index 00000000000..de159406c0e --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/logs/ExtendedLogger.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.logs; + +import io.opentelemetry.api.logs.Logger; + +/** Extended {@link Logger} with experimental APIs. */ +public interface ExtendedLogger extends Logger { + + /** + * Returns {@code true} if the logger is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #logRecordBuilder()}. + */ + default boolean isEnabled() { + return true; + } + + @Override + ExtendedLogRecordBuilder logRecordBuilder(); +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDefaultMeter.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDefaultMeter.java new file mode 100644 index 00000000000..de1ec1fdefc --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDefaultMeter.java @@ -0,0 +1,454 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.BatchCallback; +import io.opentelemetry.api.metrics.DoubleCounter; +import io.opentelemetry.api.metrics.DoubleCounterBuilder; +import io.opentelemetry.api.metrics.DoubleGauge; +import io.opentelemetry.api.metrics.DoubleGaugeBuilder; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.DoubleHistogramBuilder; +import io.opentelemetry.api.metrics.DoubleUpDownCounter; +import io.opentelemetry.api.metrics.DoubleUpDownCounterBuilder; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongCounterBuilder; +import io.opentelemetry.api.metrics.LongGauge; +import io.opentelemetry.api.metrics.LongGaugeBuilder; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.LongHistogramBuilder; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.ObservableDoubleCounter; +import io.opentelemetry.api.metrics.ObservableDoubleGauge; +import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; +import io.opentelemetry.api.metrics.ObservableDoubleUpDownCounter; +import io.opentelemetry.api.metrics.ObservableLongCounter; +import io.opentelemetry.api.metrics.ObservableLongGauge; +import io.opentelemetry.api.metrics.ObservableLongMeasurement; +import io.opentelemetry.api.metrics.ObservableLongUpDownCounter; +import io.opentelemetry.api.metrics.ObservableMeasurement; +import io.opentelemetry.context.Context; +import java.util.function.Consumer; +import javax.annotation.concurrent.ThreadSafe; + +/** + * No-op implementation of {@link Meter}. + * + *
<p>
This implementation should induce as close to zero overhead as possible. + */ +@ThreadSafe +class ExtendedDefaultMeter implements Meter { + + private static final Meter INSTANCE = new ExtendedDefaultMeter(); + + private static final LongCounterBuilder NOOP_LONG_COUNTER_BUILDER = new NoopLongCounterBuilder(); + private static final LongUpDownCounterBuilder NOOP_LONG_UP_DOWN_COUNTER_BUILDER = + new NoopLongUpDownCounterBuilder(); + private static final DoubleHistogramBuilder NOOP_DOUBLE_HISTOGRAM_BUILDER = + new NoopDoubleHistogramBuilder(); + private static final DoubleGaugeBuilder NOOP_DOUBLE_GAUGE_BUILDER = new NoopDoubleGaugeBuilder(); + private static final BatchCallback NOOP_BATCH_CALLBACK = new BatchCallback() {}; + private static final ObservableDoubleMeasurement NOOP_OBSERVABLE_DOUBLE_MEASUREMENT = + new NoopObservableDoubleMeasurement(); + private static final ObservableLongMeasurement NOOP_OBSERVABLE_LONG_MEASUREMENT = + new NoopObservableLongMeasurement(); + + static Meter getNoop() { + return INSTANCE; + } + + @Override + public LongCounterBuilder counterBuilder(String name) { + return NOOP_LONG_COUNTER_BUILDER; + } + + @Override + public LongUpDownCounterBuilder upDownCounterBuilder(String name) { + return NOOP_LONG_UP_DOWN_COUNTER_BUILDER; + } + + @Override + public DoubleHistogramBuilder histogramBuilder(String name) { + return NOOP_DOUBLE_HISTOGRAM_BUILDER; + } + + @Override + public DoubleGaugeBuilder gaugeBuilder(String name) { + return NOOP_DOUBLE_GAUGE_BUILDER; + } + + @Override + public BatchCallback batchCallback( + Runnable callback, + ObservableMeasurement observableMeasurement, + ObservableMeasurement... additionalMeasurements) { + return NOOP_BATCH_CALLBACK; + } + + private ExtendedDefaultMeter() {} + + private static class NoopLongCounter implements ExtendedLongCounter { + @Override + public void add(long value, Attributes attributes, Context context) {} + + @Override + public void add(long value, Attributes attributes) {} + + @Override + public void add(long value) {} + } + + private static class NoopDoubleCounter implements ExtendedDoubleCounter { + @Override + public void add(double value, Attributes attributes, Context context) {} + + @Override + public void add(double value, Attributes attributes) {} + + @Override + public void add(double value) {} + } + + private static class NoopLongCounterBuilder implements ExtendedLongCounterBuilder { + private static final LongCounter NOOP_COUNTER = new NoopLongCounter(); + private static final ObservableLongCounter NOOP_OBSERVABLE_COUNTER = + new ObservableLongCounter() {}; + private static final DoubleCounterBuilder NOOP_DOUBLE_COUNTER_BUILDER = + new NoopDoubleCounterBuilder(); + + @Override + public LongCounterBuilder setDescription(String description) { + return this; + } + + @Override + public LongCounterBuilder setUnit(String unit) { + return this; + } + + @Override + public DoubleCounterBuilder ofDoubles() { + return NOOP_DOUBLE_COUNTER_BUILDER; + } + + @Override + public LongCounter build() { + return NOOP_COUNTER; + } + + @Override + public ObservableLongCounter buildWithCallback(Consumer callback) { + return NOOP_OBSERVABLE_COUNTER; + } + + @Override + public ObservableLongMeasurement buildObserver() { + return NOOP_OBSERVABLE_LONG_MEASUREMENT; + } + } + + private static class NoopDoubleCounterBuilder implements ExtendedDoubleCounterBuilder { + private static final DoubleCounter NOOP_COUNTER = new NoopDoubleCounter(); + private static final ObservableDoubleCounter NOOP_OBSERVABLE_COUNTER = + new 
ObservableDoubleCounter() {}; + + @Override + public DoubleCounterBuilder setDescription(String description) { + return this; + } + + @Override + public DoubleCounterBuilder setUnit(String unit) { + return this; + } + + @Override + public DoubleCounter build() { + return NOOP_COUNTER; + } + + @Override + public ObservableDoubleCounter buildWithCallback( + Consumer callback) { + return NOOP_OBSERVABLE_COUNTER; + } + + @Override + public ObservableDoubleMeasurement buildObserver() { + return NOOP_OBSERVABLE_DOUBLE_MEASUREMENT; + } + } + + private static class NoopLongUpDownCounter implements ExtendedLongUpDownCounter { + @Override + public void add(long value, Attributes attributes, Context context) {} + + @Override + public void add(long value, Attributes attributes) {} + + @Override + public void add(long value) {} + } + + private static class NoopDoubleUpDownCounter implements ExtendedDoubleUpDownCounter { + @Override + public void add(double value, Attributes attributes, Context context) {} + + @Override + public void add(double value, Attributes attributes) {} + + @Override + public void add(double value) {} + } + + private static class NoopLongUpDownCounterBuilder implements ExtendedLongUpDownCounterBuilder { + private static final LongUpDownCounter NOOP_UP_DOWN_COUNTER = new NoopLongUpDownCounter() {}; + private static final ObservableLongUpDownCounter NOOP_OBSERVABLE_UP_DOWN_COUNTER = + new ObservableLongUpDownCounter() {}; + private static final DoubleUpDownCounterBuilder NOOP_DOUBLE_UP_DOWN_COUNTER_BUILDER = + new NoopDoubleUpDownCounterBuilder(); + + @Override + public LongUpDownCounterBuilder setDescription(String description) { + return this; + } + + @Override + public LongUpDownCounterBuilder setUnit(String unit) { + return this; + } + + @Override + public DoubleUpDownCounterBuilder ofDoubles() { + return NOOP_DOUBLE_UP_DOWN_COUNTER_BUILDER; + } + + @Override + public LongUpDownCounter build() { + return NOOP_UP_DOWN_COUNTER; + } + + @Override + public ObservableLongUpDownCounter buildWithCallback( + Consumer callback) { + return NOOP_OBSERVABLE_UP_DOWN_COUNTER; + } + + @Override + public ObservableLongMeasurement buildObserver() { + return NOOP_OBSERVABLE_LONG_MEASUREMENT; + } + } + + private static class NoopDoubleUpDownCounterBuilder + implements ExtendedDoubleUpDownCounterBuilder { + private static final DoubleUpDownCounter NOOP_UP_DOWN_COUNTER = + new NoopDoubleUpDownCounter() {}; + private static final ObservableDoubleUpDownCounter NOOP_OBSERVABLE_UP_DOWN_COUNTER = + new ObservableDoubleUpDownCounter() {}; + + @Override + public DoubleUpDownCounterBuilder setDescription(String description) { + return this; + } + + @Override + public DoubleUpDownCounterBuilder setUnit(String unit) { + return this; + } + + @Override + public DoubleUpDownCounter build() { + return NOOP_UP_DOWN_COUNTER; + } + + @Override + public ObservableDoubleUpDownCounter buildWithCallback( + Consumer callback) { + return NOOP_OBSERVABLE_UP_DOWN_COUNTER; + } + + @Override + public ObservableDoubleMeasurement buildObserver() { + return NOOP_OBSERVABLE_DOUBLE_MEASUREMENT; + } + } + + private static class NoopDoubleHistogram implements ExtendedDoubleHistogram { + @Override + public void record(double value, Attributes attributes, Context context) {} + + @Override + public void record(double value, Attributes attributes) {} + + @Override + public void record(double value) {} + } + + private static class NoopLongHistogram implements ExtendedLongHistogram { + @Override + public void record(long value, 
Attributes attributes, Context context) {} + + @Override + public void record(long value, Attributes attributes) {} + + @Override + public void record(long value) {} + } + + private static class NoopDoubleHistogramBuilder implements ExtendedDoubleHistogramBuilder { + private static final DoubleHistogram NOOP = new NoopDoubleHistogram(); + private static final LongHistogramBuilder NOOP_LONG_HISTOGRAM_BUILDER = + new NoopLongHistogramBuilder(); + + @Override + public DoubleHistogramBuilder setDescription(String description) { + return this; + } + + @Override + public DoubleHistogramBuilder setUnit(String unit) { + return this; + } + + @Override + public LongHistogramBuilder ofLongs() { + return NOOP_LONG_HISTOGRAM_BUILDER; + } + + @Override + public DoubleHistogram build() { + return NOOP; + } + } + + private static class NoopLongHistogramBuilder implements ExtendedLongHistogramBuilder { + private static final LongHistogram NOOP = new NoopLongHistogram(); + + @Override + public LongHistogramBuilder setDescription(String description) { + return this; + } + + @Override + public LongHistogramBuilder setUnit(String unit) { + return this; + } + + @Override + public LongHistogram build() { + return NOOP; + } + } + + private static class NoopDoubleGaugeBuilder implements ExtendedDoubleGaugeBuilder { + private static final ObservableDoubleGauge NOOP_OBSERVABLE_GAUGE = + new ObservableDoubleGauge() {}; + private static final LongGaugeBuilder NOOP_LONG_GAUGE_BUILDER = new NoopLongGaugeBuilder(); + private static final NoopDoubleGauge NOOP_GAUGE = new NoopDoubleGauge(); + + @Override + public DoubleGaugeBuilder setDescription(String description) { + return this; + } + + @Override + public DoubleGaugeBuilder setUnit(String unit) { + return this; + } + + @Override + public LongGaugeBuilder ofLongs() { + return NOOP_LONG_GAUGE_BUILDER; + } + + @Override + public ObservableDoubleGauge buildWithCallback(Consumer callback) { + return NOOP_OBSERVABLE_GAUGE; + } + + @Override + public ObservableDoubleMeasurement buildObserver() { + return NOOP_OBSERVABLE_DOUBLE_MEASUREMENT; + } + + @Override + public DoubleGauge build() { + return NOOP_GAUGE; + } + } + + private static class NoopDoubleGauge implements ExtendedDoubleGauge { + @Override + public void set(double value) {} + + @Override + public void set(double value, Attributes attributes) {} + + @Override + public void set(double value, Attributes attributes, Context context) {} + } + + private static class NoopLongGaugeBuilder implements ExtendedLongGaugeBuilder { + private static final ObservableLongGauge NOOP_OBSERVABLE_GAUGE = new ObservableLongGauge() {}; + private static final NoopLongGauge NOOP_GAUGE = new NoopLongGauge(); + + @Override + public LongGaugeBuilder setDescription(String description) { + return this; + } + + @Override + public LongGaugeBuilder setUnit(String unit) { + return this; + } + + @Override + public ObservableLongGauge buildWithCallback(Consumer callback) { + return NOOP_OBSERVABLE_GAUGE; + } + + @Override + public ObservableLongMeasurement buildObserver() { + return NOOP_OBSERVABLE_LONG_MEASUREMENT; + } + + @Override + public LongGauge build() { + return NOOP_GAUGE; + } + } + + private static class NoopLongGauge implements ExtendedLongGauge { + @Override + public void set(long value) {} + + @Override + public void set(long value, Attributes attributes) {} + + @Override + public void set(long value, Attributes attributes, Context context) {} + } + + private static class NoopObservableDoubleMeasurement implements 
ObservableDoubleMeasurement { + @Override + public void record(double value) {} + + @Override + public void record(double value, Attributes attributes) {} + } + + private static class NoopObservableLongMeasurement implements ObservableLongMeasurement { + @Override + public void record(long value) {} + + @Override + public void record(long value, Attributes attributes) {} + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDefaultMeterProvider.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDefaultMeterProvider.java new file mode 100644 index 00000000000..3eeca2081f8 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDefaultMeterProvider.java @@ -0,0 +1,45 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.MeterBuilder; +import io.opentelemetry.api.metrics.MeterProvider; + +/** A {@link MeterProvider} that does nothing. */ +public class ExtendedDefaultMeterProvider implements MeterProvider { + @Override + public MeterBuilder meterBuilder(String instrumentationScopeName) { + return BUILDER_INSTANCE; + } + + private static final ExtendedDefaultMeterProvider INSTANCE = new ExtendedDefaultMeterProvider(); + private static final MeterBuilder BUILDER_INSTANCE = new NoopMeterBuilder(); + + public static MeterProvider getNoop() { + return INSTANCE; + } + + private ExtendedDefaultMeterProvider() {} + + private static class NoopMeterBuilder implements MeterBuilder { + + @Override + public MeterBuilder setSchemaUrl(String schemaUrl) { + return this; + } + + @Override + public MeterBuilder setInstrumentationVersion(String instrumentationScopeVersion) { + return this; + } + + @Override + public Meter build() { + return ExtendedDefaultMeter.getNoop(); + } + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleCounter.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleCounter.java new file mode 100644 index 00000000000..4345661ebe5 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleCounter.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleCounter; +import io.opentelemetry.context.Context; + +/** Extended {@link DoubleCounter} with experimental APIs. */ +public interface ExtendedDoubleCounter extends DoubleCounter { + + /** + * Returns {@code true} if the counter is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #add(double)}, {@link #add(double, Attributes)}, or {@link #add(double, + * Attributes, Context)}. + */ + default boolean isEnabled() { + return true; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleCounterBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleCounterBuilder.java new file mode 100644 index 00000000000..d601318c87f --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleCounterBuilder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.metrics.DoubleCounterBuilder; +import java.util.List; + +/** Extended {@link DoubleCounterBuilder} with experimental APIs. */ +public interface ExtendedDoubleCounterBuilder extends DoubleCounterBuilder { + + /** + * Specify the attribute advice, which suggests the recommended set of attribute keys to be used + * for this counter. + */ + default ExtendedDoubleCounterBuilder setAttributesAdvice(List> attributes) { + return this; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleGauge.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleGauge.java new file mode 100644 index 00000000000..d9a56f7a391 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleGauge.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleGauge; +import io.opentelemetry.context.Context; + +/** Extended {@link DoubleGauge} with experimental APIs. */ +public interface ExtendedDoubleGauge extends DoubleGauge { + + /** + * Returns {@code true} if the gauge is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #set(double)}, {@link #set(double, Attributes)}, or {@link #set(double, + * Attributes, Context)}. + */ + default boolean isEnabled() { + return true; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleGaugeBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleGaugeBuilder.java new file mode 100644 index 00000000000..eb576559609 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleGaugeBuilder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.metrics.DoubleGaugeBuilder; +import java.util.List; + +/** Extended {@link DoubleGaugeBuilder} with experimental APIs. */ +public interface ExtendedDoubleGaugeBuilder extends DoubleGaugeBuilder { + + /** + * Specify the attribute advice, which suggests the recommended set of attribute keys to be used + * for this gauge. + */ + default ExtendedDoubleGaugeBuilder setAttributesAdvice(List> attributes) { + return this; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleHistogram.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleHistogram.java new file mode 100644 index 00000000000..0a481afef2b --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleHistogram.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.context.Context; + +/** Extended {@link DoubleHistogram} with experimental APIs. */ +public interface ExtendedDoubleHistogram extends DoubleHistogram { + + /** + * Returns {@code true} if the histogram is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #record(double)}, {@link #record(double, Attributes)}, or {@link #record(double, + * Attributes, Context)}. + */ + default boolean isEnabled() { + return true; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleHistogramBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleHistogramBuilder.java new file mode 100644 index 00000000000..8caf8f372f0 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleHistogramBuilder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.metrics.DoubleHistogramBuilder; +import java.util.List; + +/** Extended {@link DoubleHistogramBuilder} with experimental APIs. */ +public interface ExtendedDoubleHistogramBuilder extends DoubleHistogramBuilder { + + /** + * Specify the attribute advice, which suggests the recommended set of attribute keys to be used + * for this histogram. + */ + default ExtendedDoubleHistogramBuilder setAttributesAdvice(List> attributes) { + return this; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleUpDownCounter.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleUpDownCounter.java new file mode 100644 index 00000000000..6dbb91f1d6f --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleUpDownCounter.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleUpDownCounter; +import io.opentelemetry.context.Context; + +/** Extended {@link DoubleUpDownCounter} with experimental APIs. */ +public interface ExtendedDoubleUpDownCounter extends DoubleUpDownCounter { + + /** + * Returns {@code true} if the up down counter is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #add(double)}, {@link #add(double, Attributes)}, or {@link #add(double, + * Attributes, Context)}. + */ + default boolean isEnabled() { + return true; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleUpDownCounterBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleUpDownCounterBuilder.java new file mode 100644 index 00000000000..8262b448b13 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedDoubleUpDownCounterBuilder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.metrics.DoubleUpDownCounterBuilder; +import java.util.List; + +/** Extended {@link DoubleUpDownCounterBuilder} with experimental APIs. */ +public interface ExtendedDoubleUpDownCounterBuilder extends DoubleUpDownCounterBuilder { + + /** + * Specify the attribute advice, which suggests the recommended set of attribute keys to be used + * for this up down counter. + */ + default ExtendedDoubleUpDownCounterBuilder setAttributesAdvice(List> attributes) { + return this; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongCounter.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongCounter.java new file mode 100644 index 00000000000..0ff67a38fb9 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongCounter.java @@ -0,0 +1,27 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleCounter; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.context.Context; + +/** Extended {@link DoubleCounter} with experimental APIs. */ +public interface ExtendedLongCounter extends LongCounter { + + /** + * Returns {@code true} if the counter is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #add(long)}, {@link #add(long, Attributes)}, or {@link #add(long, Attributes, + * Context)}. + */ + default boolean isEnabled() { + return true; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongCounterBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongCounterBuilder.java new file mode 100644 index 00000000000..04b7ae6f633 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongCounterBuilder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.metrics.LongCounterBuilder; +import java.util.List; + +/** Extended {@link LongCounterBuilder} with experimental APIs. */ +public interface ExtendedLongCounterBuilder extends LongCounterBuilder { + + /** + * Specify the attribute advice, which suggests the recommended set of attribute keys to be used + * for this counter. + */ + default ExtendedLongCounterBuilder setAttributesAdvice(List> attributes) { + return this; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongGauge.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongGauge.java new file mode 100644 index 00000000000..7a660d0e007 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongGauge.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongGauge; +import io.opentelemetry.context.Context; + +/** Extended {@link LongGauge} with experimental APIs. */ +public interface ExtendedLongGauge extends LongGauge { + + /** + * Returns {@code true} if the gauge is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #set(long)}, {@link #set(long, Attributes)}, or {@link #set(long, Attributes, + * Context)}. + */ + default boolean isEnabled() { + return true; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongGaugeBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongGaugeBuilder.java new file mode 100644 index 00000000000..db05e0958f3 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongGaugeBuilder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.metrics.LongGaugeBuilder; +import java.util.List; + +/** Extended {@link LongGaugeBuilder} with experimental APIs. */ +public interface ExtendedLongGaugeBuilder extends LongGaugeBuilder { + + /** + * Specify the attribute advice, which suggests the recommended set of attribute keys to be used + * for this gauge. + */ + default ExtendedLongGaugeBuilder setAttributesAdvice(List> attributes) { + return this; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongHistogram.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongHistogram.java new file mode 100644 index 00000000000..d1cd303fb7d --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongHistogram.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.context.Context; + +/** Extended {@link LongHistogram} with experimental APIs. */ +public interface ExtendedLongHistogram extends LongHistogram { + + /** + * Returns {@code true} if the histogram is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #record(long)}, {@link #record(long, Attributes)}, or {@link #record(long, + * Attributes, Context)}. + */ + default boolean isEnabled() { + return true; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongHistogramBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongHistogramBuilder.java new file mode 100644 index 00000000000..9986443f143 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongHistogramBuilder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.metrics.LongHistogramBuilder; +import java.util.List; + +/** Extended {@link LongHistogramBuilder} with experimental APIs. */ +public interface ExtendedLongHistogramBuilder extends LongHistogramBuilder { + + /** + * Specify the attribute advice, which suggests the recommended set of attribute keys to be used + * for this histogram. + */ + default ExtendedLongHistogramBuilder setAttributesAdvice(List> attributes) { + return this; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongUpDownCounter.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongUpDownCounter.java new file mode 100644 index 00000000000..7327ed43842 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongUpDownCounter.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.context.Context; + +/** Extended {@link LongUpDownCounter} with experimental APIs. */ +public interface ExtendedLongUpDownCounter extends LongUpDownCounter { + + /** + * Returns {@code true} if the up down counter is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #add(long)}, {@link #add(long, Attributes)}, or {@link #add(long, Attributes, + * Context)}. + */ + default boolean isEnabled() { + return true; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongUpDownCounterBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongUpDownCounterBuilder.java new file mode 100644 index 00000000000..b1d8c2f0b18 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/metrics/ExtendedLongUpDownCounterBuilder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; +import java.util.List; + +/** Extended {@link LongUpDownCounterBuilder} with experimental APIs. */ +public interface ExtendedLongUpDownCounterBuilder extends LongUpDownCounterBuilder { + + /** + * Specify the attribute advice, which suggests the recommended set of attribute keys to be used + * for this up down counter. + */ + default ExtendedLongUpDownCounterBuilder setAttributesAdvice(List> attributes) { + return this; + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/propagation/CaseInsensitiveMap.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/propagation/CaseInsensitiveMap.java new file mode 100644 index 00000000000..64a33350508 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/propagation/CaseInsensitiveMap.java @@ -0,0 +1,44 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.propagation; + +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import javax.annotation.Nullable; + +class CaseInsensitiveMap extends HashMap { + + private static final long serialVersionUID = -4202518750189126871L; + + CaseInsensitiveMap() {} + + CaseInsensitiveMap(Map carrier) { + if (carrier != null) { + this.putAll(carrier); + } + } + + @Override + public String put(String key, String value) { + return super.put(getKeyLowerCase(key), value); + } + + @Override + public void putAll(Map m) { + m.forEach(this::put); + } + + private static String getKeyLowerCase(String key) { + return key.toLowerCase(Locale.ROOT); + } + + @Override + @Nullable + public String get(Object key) { + return super.get(getKeyLowerCase((String) key)); + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/propagation/ExtendedContextPropagators.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/propagation/ExtendedContextPropagators.java new file mode 100644 index 00000000000..2af3cae35ed --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/propagation/ExtendedContextPropagators.java @@ -0,0 +1,81 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.propagation; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.context.propagation.TextMapGetter; +import 
java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import javax.annotation.Nullable; + +/** + * Utility class to simplify context propagation. + * + *
<p>
The README + * explains the use cases in more detail. + */ +public final class ExtendedContextPropagators { + + private ExtendedContextPropagators() {} + + private static final TextMapGetter> TEXT_MAP_GETTER = + new TextMapGetter>() { + @Override + public Set keys(Map carrier) { + return carrier.keySet(); + } + + @Override + @Nullable + public String get(@Nullable Map carrier, String key) { + return carrier == null ? null : carrier.get(key); + } + }; + + /** + * Injects the current context into a string map, which can then be added to HTTP headers or the + * metadata of a message. + * + * @param propagators provide the propagators from {@link OpenTelemetry#getPropagators()} + */ + public static Map getTextMapPropagationContext(ContextPropagators propagators) { + Map carrier = new HashMap<>(); + propagators + .getTextMapPropagator() + .inject( + Context.current(), + carrier, + (map, key, value) -> { + if (map != null) { + map.put(key, value); + } + }); + + return Collections.unmodifiableMap(carrier); + } + + /** + * Extract the context from a string map, which you get from HTTP headers of the metadata of a + * message you're processing. + * + * @param carrier the string map + * @param propagators provide the propagators from {@link OpenTelemetry#getPropagators()} + */ + public static Context extractTextMapPropagationContext( + Map carrier, ContextPropagators propagators) { + Context current = Context.current(); + if (carrier == null) { + return current; + } + CaseInsensitiveMap caseInsensitiveMap = new CaseInsensitiveMap(carrier); + return propagators.getTextMapPropagator().extract(current, caseInsensitiveMap, TEXT_MAP_GETTER); + } +} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/propagation/PassThroughPropagator.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/propagation/PassThroughPropagator.java similarity index 98% rename from extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/propagation/PassThroughPropagator.java rename to api/incubator/src/main/java/io/opentelemetry/api/incubator/propagation/PassThroughPropagator.java index 37b41248df5..a5f8507a13f 100644 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/propagation/PassThroughPropagator.java +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/propagation/PassThroughPropagator.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.extension.incubator.propagation; +package io.opentelemetry.api.incubator.propagation; import static java.util.Objects.requireNonNull; diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracer.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracer.java new file mode 100644 index 00000000000..45346099a5d --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracer.java @@ -0,0 +1,155 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.trace; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.incubator.propagation.ExtendedContextPropagators; +import io.opentelemetry.api.internal.ApiUsageLogger; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.Tracer; +import 
io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.ContextPropagators; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; +import javax.annotation.Nullable; +import javax.annotation.concurrent.ThreadSafe; + +/** No-op implementation of {@link ExtendedTracer}. */ +@ThreadSafe +final class ExtendedDefaultTracer implements ExtendedTracer { + + private static final Tracer INSTANCE = new ExtendedDefaultTracer(); + + static Tracer getNoop() { + return INSTANCE; + } + + @Override + public ExtendedSpanBuilder spanBuilder(String spanName) { + return NoopSpanBuilder.create(); + } + + private ExtendedDefaultTracer() {} + + // Noop implementation of Span.Builder. + private static final class NoopSpanBuilder implements ExtendedSpanBuilder { + static NoopSpanBuilder create() { + return new NoopSpanBuilder(); + } + + @Nullable private SpanContext spanContext; + + @Override + public Span startSpan() { + if (spanContext == null) { + spanContext = Span.current().getSpanContext(); + } + + return Span.wrap(spanContext); + } + + @Override + public NoopSpanBuilder setParent(Context context) { + if (context == null) { + ApiUsageLogger.log("context is null"); + return this; + } + spanContext = Span.fromContext(context).getSpanContext(); + return this; + } + + @Override + public NoopSpanBuilder setParentFrom( + ContextPropagators propagators, Map carrier) { + setParent(ExtendedContextPropagators.extractTextMapPropagationContext(carrier, propagators)); + return this; + } + + @Override + public NoopSpanBuilder setNoParent() { + spanContext = SpanContext.getInvalid(); + return this; + } + + @Override + public NoopSpanBuilder addLink(SpanContext spanContext) { + return this; + } + + @Override + public NoopSpanBuilder addLink(SpanContext spanContext, Attributes attributes) { + return this; + } + + @Override + public NoopSpanBuilder setAttribute(String key, String value) { + return this; + } + + @Override + public NoopSpanBuilder setAttribute(String key, long value) { + return this; + } + + @Override + public NoopSpanBuilder setAttribute(String key, double value) { + return this; + } + + @Override + public NoopSpanBuilder setAttribute(String key, boolean value) { + return this; + } + + @Override + public NoopSpanBuilder setAttribute(AttributeKey key, T value) { + return this; + } + + @Override + public NoopSpanBuilder setAllAttributes(Attributes attributes) { + return this; + } + + @Override + public NoopSpanBuilder setSpanKind(SpanKind spanKind) { + return this; + } + + @Override + public NoopSpanBuilder setStartTimestamp(long startTimestamp, TimeUnit unit) { + return this; + } + + @Override + public T startAndCall(SpanCallable spanCallable) throws E { + return spanCallable.callInSpan(); + } + + @Override + public T startAndCall( + SpanCallable spanCallable, BiConsumer handleException) throws E { + return spanCallable.callInSpan(); + } + + @Override + public void startAndRun(SpanRunnable runnable) throws E { + runnable.runInSpan(); + } + + @Override + public void startAndRun( + SpanRunnable runnable, BiConsumer handleException) throws E { + runnable.runInSpan(); + } + + private NoopSpanBuilder() {} + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracerBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracerBuilder.java new file mode 100644 index 00000000000..20469674ae5 --- /dev/null +++ 
b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracerBuilder.java @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.trace; + +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.TracerBuilder; + +final class ExtendedDefaultTracerBuilder implements TracerBuilder { + private static final ExtendedDefaultTracerBuilder INSTANCE = new ExtendedDefaultTracerBuilder(); + + static TracerBuilder getInstance() { + return INSTANCE; + } + + @Override + public TracerBuilder setSchemaUrl(String schemaUrl) { + return this; + } + + @Override + public TracerBuilder setInstrumentationVersion(String instrumentationScopeVersion) { + return this; + } + + @Override + public Tracer build() { + return ExtendedDefaultTracer.getNoop(); + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracerProvider.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracerProvider.java new file mode 100644 index 00000000000..b7bd2133ad7 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracerProvider.java @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.trace; + +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.TracerBuilder; +import io.opentelemetry.api.trace.TracerProvider; +import javax.annotation.concurrent.ThreadSafe; + +@ThreadSafe +public class ExtendedDefaultTracerProvider implements TracerProvider { + + private static final TracerProvider INSTANCE = new ExtendedDefaultTracerProvider(); + + public static TracerProvider getNoop() { + return INSTANCE; + } + + @Override + public Tracer get(String instrumentationScopeName) { + return ExtendedDefaultTracer.getNoop(); + } + + @Override + public Tracer get(String instrumentationScopeName, String instrumentationScopeVersion) { + return ExtendedDefaultTracer.getNoop(); + } + + @Override + public TracerBuilder tracerBuilder(String instrumentationScopeName) { + return ExtendedDefaultTracerBuilder.getInstance(); + } + + private ExtendedDefaultTracerProvider() {} +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedSpanBuilder.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedSpanBuilder.java new file mode 100644 index 00000000000..8474095f734 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedSpanBuilder.java @@ -0,0 +1,149 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.trace; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanBuilder; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.ContextPropagators; +import java.time.Instant; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; + +/** Extended {@link SpanBuilder} with experimental APIs. 
*/ +public interface ExtendedSpanBuilder extends SpanBuilder { + + /** + * Extract a span context from the given carrier and set it as parent of the span for {@link + * #startAndCall(SpanCallable)} and {@link #startAndRun(SpanRunnable)}. + * + *
<p>
The span context will be extracted from the carrier, which you usually get from + * HTTP headers of the metadata of a message you're processing. + * + * @param propagators provide the propagators from {@link OpenTelemetry#getPropagators()} + * @param carrier the string map where to extract the span context from + */ + ExtendedSpanBuilder setParentFrom(ContextPropagators propagators, Map carrier); + + /** + * Runs the given {@link SpanCallable} inside of the span created by the given {@link + * SpanBuilder}. The span will be ended at the end of the {@link SpanCallable}. + * + *
<p>
If an exception is thrown by the {@link SpanCallable}, the span will be marked as error, and + * the exception will be recorded. + * + * @param spanCallable the {@link SpanCallable} to call + * @param the type of the result + * @param the type of the exception + * @return the result of the {@link SpanCallable} + */ + T startAndCall(SpanCallable spanCallable) throws E; + + /** + * Runs the given {@link SpanCallable} inside of the span created by the given {@link + * SpanBuilder}. The span will be ended at the end of the {@link SpanCallable}. + * + *
<p>
If an exception is thrown by the {@link SpanCallable}, the handleException + * consumer will be called, giving you the opportunity to handle the exception and span in a + * custom way, e.g. not marking the span as error. + * + * @param spanCallable the {@link SpanCallable} to call + * @param handleException the consumer to call when an exception is thrown + * @param the type of the result + * @param the type of the exception + * @return the result of the {@link SpanCallable} + */ + T startAndCall( + SpanCallable spanCallable, BiConsumer handleException) throws E; + + /** + * Runs the given {@link SpanRunnable} inside of the span created by the given {@link + * SpanBuilder}. The span will be ended at the end of the {@link SpanRunnable}. + * + *
<p>
If an exception is thrown by the {@link SpanRunnable}, the span will be marked as error, and + * the exception will be recorded. + * + * @param runnable the {@link SpanRunnable} to run + * @param the type of the exception + */ + void startAndRun(SpanRunnable runnable) throws E; + + /** + * Runs the given {@link SpanRunnable} inside of the span created by the given {@link + * SpanBuilder}. The span will be ended at the end of the {@link SpanRunnable}. + * + *
<p>
If an exception is thrown by the {@link SpanRunnable}, the handleException + * consumer will be called, giving you the opportunity to handle the exception and span in a + * custom way, e.g. not marking the span as error. + * + * @param runnable the {@link SpanRunnable} to run + * @param the type of the exception + */ + void startAndRun( + SpanRunnable runnable, BiConsumer handleException) throws E; + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder setParent(Context context); + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder setNoParent(); + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder addLink(SpanContext spanContext); + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder addLink(SpanContext spanContext, Attributes attributes); + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder setAttribute(String key, String value); + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder setAttribute(String key, long value); + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder setAttribute(String key, double value); + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder setAttribute(String key, boolean value); + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder setAttribute(AttributeKey key, T value); + + /** {@inheritDoc} */ + @Override + default ExtendedSpanBuilder setAllAttributes(Attributes attributes) { + return (ExtendedSpanBuilder) SpanBuilder.super.setAllAttributes(attributes); + } + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder setSpanKind(SpanKind spanKind); + + /** {@inheritDoc} */ + @Override + ExtendedSpanBuilder setStartTimestamp(long startTimestamp, TimeUnit unit); + + /** {@inheritDoc} */ + @Override + default ExtendedSpanBuilder setStartTimestamp(Instant startTimestamp) { + return (ExtendedSpanBuilder) SpanBuilder.super.setStartTimestamp(startTimestamp); + } +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedTracer.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedTracer.java new file mode 100644 index 00000000000..c37ba913eb1 --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/ExtendedTracer.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.trace; + +import io.opentelemetry.api.trace.Tracer; + +/** Extended {@link Tracer} with experimental APIs. */ +public interface ExtendedTracer extends Tracer { + + /** + * Returns {@code true} if the tracer is enabled. + * + *
<p>
This allows callers to avoid unnecessary compute when nothing is consuming the data. Because + * the response is subject to change over the application, callers should call this before each + * call to {@link #spanBuilder(String)}. + */ + default boolean isEnabled() { + return true; + } + + @Override + ExtendedSpanBuilder spanBuilder(String spanName); +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/SpanCallable.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/SpanCallable.java new file mode 100644 index 00000000000..881e1bd541d --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/SpanCallable.java @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.trace; + +/** + * An interface for creating a lambda that is wrapped in a span, returns a value, and that may + * throw. + * + * @param Thrown exception type. + */ +@FunctionalInterface +public interface SpanCallable { + T callInSpan() throws E; +} diff --git a/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/SpanRunnable.java b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/SpanRunnable.java new file mode 100644 index 00000000000..9f245f340bf --- /dev/null +++ b/api/incubator/src/main/java/io/opentelemetry/api/incubator/trace/SpanRunnable.java @@ -0,0 +1,16 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.trace; + +/** + * An interface for creating a lambda that is wrapped in a span and that may throw. + * + * @param Thrown exception type. + */ +@FunctionalInterface +public interface SpanRunnable { + void runInSpan() throws E; +} diff --git a/api/incubator/src/main/resources/META-INF/native-image/io.opentelemetry/opentelemetry-api/reflect-config.json b/api/incubator/src/main/resources/META-INF/native-image/io.opentelemetry/opentelemetry-api/reflect-config.json new file mode 100644 index 00000000000..d9abd56c422 --- /dev/null +++ b/api/incubator/src/main/resources/META-INF/native-image/io.opentelemetry/opentelemetry-api/reflect-config.json @@ -0,0 +1,38 @@ +[ + { + "methods": [ + { + "name": "getNoop", + "parameterTypes": [] + } + ], + "name": "io.opentelemetry.api.incubator.logs.ExtendedDefaultLoggerProvider" + }, + { + "methods": [ + { + "name": "getNoop", + "parameterTypes": [] + } + ], + "name": "io.opentelemetry.api.incubator.metrics.ExtendedDefaultMeterProvider" + }, + { + "methods": [ + { + "name": "getNoop", + "parameterTypes": [] + } + ], + "name": "io.opentelemetry.api.incubator.trace.ExtendedDefaultTracerProvider" + }, + { + "methods": [ + { + "name": "getNoop", + "parameterTypes": [] + } + ], + "name": "io.opentelemetry.api.incubator.ExtendedDefaultOpenTelemetry" + } +] diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/ConfigProviderTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/ConfigProviderTest.java new file mode 100644 index 00000000000..9c9e4bf41e1 --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/ConfigProviderTest.java @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.incubator.config.ConfigProvider; +import org.junit.jupiter.api.Test; + +class ConfigProviderTest { + + 
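To make the tracer-side incubator surface above (ExtendedTracer, ExtendedSpanBuilder, SpanCallable) more concrete, here is a hedged usage sketch. It is not part of this patch: the handler class, instrumentation name, and header map are invented, and it assumes the incubator artifact is on the classpath so that a `Tracer` can be cast to `ExtendedTracer` (as ExtendedOpenTelemetryTest below verifies for the default implementations).

```java
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.incubator.trace.ExtendedSpanBuilder;
import io.opentelemetry.api.incubator.trace.ExtendedTracer;
import io.opentelemetry.api.trace.SpanKind;
import java.util.Map;

// Hypothetical message handler, for illustration only.
class ExampleMessageHandler {

  private final OpenTelemetry openTelemetry;

  ExampleMessageHandler(OpenTelemetry openTelemetry) {
    this.openTelemetry = openTelemetry;
  }

  String handle(Map<String, String> incomingHeaders) {
    ExtendedTracer tracer =
        (ExtendedTracer) openTelemetry.getTracer("example-instrumentation");
    if (!tracer.isEnabled()) {
      // Avoid span bookkeeping entirely when nothing is consuming the data.
      return process();
    }
    ExtendedSpanBuilder spanBuilder =
        tracer
            .spanBuilder("handle message")
            .setSpanKind(SpanKind.CONSUMER)
            // Continue the trace propagated via the incoming headers/metadata.
            .setParentFrom(openTelemetry.getPropagators(), incomingHeaders);
    // Starts the span, runs the callable, records any thrown exception, and ends the span.
    return spanBuilder.startAndCall(this::process);
  }

  private String process() {
    return "done";
  }
}
```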
@Test + void noopEquality() { + ConfigProvider noop = ConfigProvider.noop(); + assertThat(ConfigProvider.noop()).isSameAs(noop); + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/ExtendedOpenTelemetryTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/ExtendedOpenTelemetryTest.java new file mode 100644 index 00000000000..4903d00c2e6 --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/ExtendedOpenTelemetryTest.java @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.incubator.logs.ExtendedDefaultLoggerProvider; +import io.opentelemetry.api.incubator.logs.ExtendedLogger; +import io.opentelemetry.api.incubator.metrics.ExtendedDefaultMeterProvider; +import io.opentelemetry.api.incubator.metrics.ExtendedLongCounterBuilder; +import io.opentelemetry.api.incubator.trace.ExtendedDefaultTracerProvider; +import io.opentelemetry.api.incubator.trace.ExtendedTracer; +import io.opentelemetry.api.logs.LoggerProvider; +import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.api.testing.internal.AbstractOpenTelemetryTest; +import io.opentelemetry.api.trace.TracerProvider; +import io.opentelemetry.context.propagation.ContextPropagators; +import org.junit.jupiter.api.Test; + +class ExtendedOpenTelemetryTest extends AbstractOpenTelemetryTest { + + @Override + protected TracerProvider getTracerProvider() { + return ExtendedDefaultTracerProvider.getNoop(); + } + + @Override + protected MeterProvider getMeterProvider() { + return ExtendedDefaultMeterProvider.getNoop(); + } + + @Override + protected LoggerProvider getLoggerProvider() { + return ExtendedDefaultLoggerProvider.getNoop(); + } + + @Test + void incubatingApiIsLoaded() { + assertIsExtended(OpenTelemetry.noop()); + assertIsExtended(OpenTelemetry.propagating(ContextPropagators.noop())); + } + + private static void assertIsExtended(OpenTelemetry openTelemetry) { + assertThat(openTelemetry.getMeter("test").counterBuilder("test")) + .isInstanceOf(ExtendedLongCounterBuilder.class); + assertThat(openTelemetry.getLogsBridge().get("test")).isInstanceOf(ExtendedLogger.class); + assertThat(openTelemetry.getTracer("test")).isInstanceOf(ExtendedTracer.class); + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/config/GlobalConfigProviderTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/config/GlobalConfigProviderTest.java new file mode 100644 index 00000000000..ecd837a5298 --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/config/GlobalConfigProviderTest.java @@ -0,0 +1,44 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.config; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +class GlobalConfigProviderTest { + + @BeforeAll + static void beforeClass() { + GlobalConfigProvider.resetForTest(); + } + + @AfterEach + void after() { + GlobalConfigProvider.resetForTest(); + } + + @Test + void setAndGet() { + assertThat(GlobalConfigProvider.get()).isEqualTo(ConfigProvider.noop()); + ConfigProvider 
configProvider = DeclarativeConfigProperties::empty; + GlobalConfigProvider.set(configProvider); + assertThat(GlobalConfigProvider.get()).isSameAs(configProvider); + } + + @Test + void setThenSet() { + ConfigProvider configProvider = DeclarativeConfigProperties::empty; + GlobalConfigProvider.set(configProvider); + assertThatThrownBy(() -> GlobalConfigProvider.set(configProvider)) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("GlobalConfigProvider.set has already been called") + .hasStackTraceContaining("setThenSet"); + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/config/InstrumentationConfigUtilTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/config/InstrumentationConfigUtilTest.java new file mode 100644 index 00000000000..94da0f2d10b --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/config/InstrumentationConfigUtilTest.java @@ -0,0 +1,169 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.google.common.collect.ImmutableMap; +import io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfiguration; +import io.opentelemetry.sdk.extension.incubator.fileconfig.SdkConfigProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.YamlDeclarativeConfigProperties; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import org.junit.jupiter.api.Test; + +class InstrumentationConfigUtilTest { + + /** + * See kitchen-sink.yaml. 
+ */ + private static final String kitchenSinkInstrumentationConfig = + "instrumentation:\n" + + " general:\n" + + " peer:\n" + + " service_mapping:\n" + + " - peer: 1.2.3.4\n" + + " service: FooService\n" + + " - peer: 2.3.4.5\n" + + " service: BarService\n" + + " http:\n" + + " client:\n" + + " request_captured_headers:\n" + + " - client-request-header1\n" + + " - client-request-header2\n" + + " response_captured_headers:\n" + + " - client-response-header1\n" + + " - client-response-header2\n" + + " server:\n" + + " request_captured_headers:\n" + + " - server-request-header1\n" + + " - server-request-header2\n" + + " response_captured_headers:\n" + + " - server-response-header1\n" + + " - server-response-header2\n" + + " java:\n" + + " example:\n" + + " property: \"value\""; + + private static final ConfigProvider kitchenSinkConfigProvider = + toConfigProvider(kitchenSinkInstrumentationConfig); + private static final ConfigProvider emptyInstrumentationConfigProvider = + toConfigProvider("instrumentation:\n"); + private static final ConfigProvider emptyGeneralConfigProvider = + toConfigProvider("instrumentation:\n general:\n"); + private static final ConfigProvider emptyHttpConfigProvider = + toConfigProvider("instrumentation:\n general:\n http:\n"); + + private static ConfigProvider toConfigProvider(String configYaml) { + OpenTelemetryConfigurationModel configuration = + DeclarativeConfiguration.parse( + new ByteArrayInputStream(configYaml.getBytes(StandardCharsets.UTF_8))); + return SdkConfigProvider.create(configuration); + } + + @Test + void peerServiceMapping() { + assertThat(InstrumentationConfigUtil.peerServiceMapping(kitchenSinkConfigProvider)) + .isEqualTo(ImmutableMap.of("1.2.3.4", "FooService", "2.3.4.5", "BarService")); + assertThat(InstrumentationConfigUtil.peerServiceMapping(emptyInstrumentationConfigProvider)) + .isNull(); + assertThat(InstrumentationConfigUtil.peerServiceMapping(emptyGeneralConfigProvider)).isNull(); + assertThat(InstrumentationConfigUtil.peerServiceMapping(emptyHttpConfigProvider)).isNull(); + } + + @Test + void httpClientRequestCapturedHeaders() { + assertThat( + InstrumentationConfigUtil.httpClientRequestCapturedHeaders(kitchenSinkConfigProvider)) + .isEqualTo(Arrays.asList("client-request-header1", "client-request-header2")); + assertThat( + InstrumentationConfigUtil.httpClientRequestCapturedHeaders( + emptyInstrumentationConfigProvider)) + .isNull(); + assertThat( + InstrumentationConfigUtil.httpClientRequestCapturedHeaders(emptyGeneralConfigProvider)) + .isNull(); + assertThat(InstrumentationConfigUtil.httpClientRequestCapturedHeaders(emptyHttpConfigProvider)) + .isNull(); + } + + @Test + void httpClientResponseCapturedHeaders() { + assertThat( + InstrumentationConfigUtil.httpClientResponseCapturedHeaders(kitchenSinkConfigProvider)) + .isEqualTo(Arrays.asList("client-response-header1", "client-response-header2")); + assertThat( + InstrumentationConfigUtil.httpClientResponseCapturedHeaders( + emptyInstrumentationConfigProvider)) + .isNull(); + assertThat( + InstrumentationConfigUtil.httpClientResponseCapturedHeaders(emptyGeneralConfigProvider)) + .isNull(); + assertThat(InstrumentationConfigUtil.httpClientResponseCapturedHeaders(emptyHttpConfigProvider)) + .isNull(); + } + + @Test + void httpServerRequestCapturedHeaders() { + assertThat( + InstrumentationConfigUtil.httpServerRequestCapturedHeaders(kitchenSinkConfigProvider)) + .isEqualTo(Arrays.asList("server-request-header1", "server-request-header2")); + assertThat( + 
InstrumentationConfigUtil.httpServerRequestCapturedHeaders( + emptyInstrumentationConfigProvider)) + .isNull(); + assertThat( + InstrumentationConfigUtil.httpServerRequestCapturedHeaders(emptyGeneralConfigProvider)) + .isNull(); + assertThat(InstrumentationConfigUtil.httpServerRequestCapturedHeaders(emptyHttpConfigProvider)) + .isNull(); + } + + @Test + void httpServerResponseCapturedHeaders() { + assertThat( + InstrumentationConfigUtil.httpSeverResponseCapturedHeaders(kitchenSinkConfigProvider)) + .isEqualTo(Arrays.asList("server-response-header1", "server-response-header2")); + assertThat( + InstrumentationConfigUtil.httpSeverResponseCapturedHeaders( + emptyInstrumentationConfigProvider)) + .isNull(); + assertThat( + InstrumentationConfigUtil.httpSeverResponseCapturedHeaders(emptyGeneralConfigProvider)) + .isNull(); + assertThat(InstrumentationConfigUtil.httpSeverResponseCapturedHeaders(emptyHttpConfigProvider)) + .isNull(); + } + + @Test + void javaInstrumentationConfig() { + assertThat( + InstrumentationConfigUtil.javaInstrumentationConfig( + kitchenSinkConfigProvider, "example")) + .isNotNull() + .isInstanceOfSatisfying( + YamlDeclarativeConfigProperties.class, + exampleConfig -> + assertThat(exampleConfig.toMap()).isEqualTo(ImmutableMap.of("property", "value"))); + assertThat( + InstrumentationConfigUtil.javaInstrumentationConfig(kitchenSinkConfigProvider, "foo")) + .isNull(); + assertThat( + InstrumentationConfigUtil.javaInstrumentationConfig( + emptyInstrumentationConfigProvider, "example")) + .isNull(); + assertThat( + InstrumentationConfigUtil.javaInstrumentationConfig( + emptyGeneralConfigProvider, "example")) + .isNull(); + assertThat( + InstrumentationConfigUtil.javaInstrumentationConfig(emptyHttpConfigProvider, "example")) + .isNull(); + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/logs/ExtendedDefaultLoggerTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/logs/ExtendedDefaultLoggerTest.java new file mode 100644 index 00000000000..e92a3c160a6 --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/logs/ExtendedDefaultLoggerTest.java @@ -0,0 +1,37 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.logs; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.api.logs.LoggerProvider; +import io.opentelemetry.api.testing.internal.AbstractDefaultLoggerTest; +import org.junit.jupiter.api.Test; + +class ExtendedDefaultLoggerTest extends AbstractDefaultLoggerTest { + + @Override + protected LoggerProvider getLoggerProvider() { + return ExtendedDefaultLoggerProvider.getNoop(); + } + + @Override + protected Logger getLogger() { + return ExtendedDefaultLogger.getNoop(); + } + + @Test + void incubatingApiIsLoaded() { + Logger logger = LoggerProvider.noop().get("test"); + + assertThat(logger).isInstanceOf(ExtendedLogger.class); + ExtendedLogRecordBuilder builder = (ExtendedLogRecordBuilder) logger.logRecordBuilder(); + assertThat(builder).isInstanceOf(ExtendedLogRecordBuilder.class); + assertThat(builder.setBody(Value.of(0))).isSameAs(builder); + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/logs/ExtendedLogsBridgeApiUsageTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/logs/ExtendedLogsBridgeApiUsageTest.java new file mode 100644 index 00000000000..2d9c494ee3e --- 
/dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/logs/ExtendedLogsBridgeApiUsageTest.java @@ -0,0 +1,79 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.logs; + +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameEquals; +import static io.opentelemetry.sdk.logs.internal.LoggerConfig.disabled; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; +import io.opentelemetry.sdk.logs.internal.SdkLoggerProviderUtil; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; +import java.util.Random; +import org.junit.jupiter.api.Test; + +/** Demonstrating usage of extended Logs Bridge API. */ +class ExtendedLogsBridgeApiUsageTest { + + @Test + void loggerEnabled() { + // Setup SdkLoggerProvider + InMemoryLogRecordExporter exporter = InMemoryLogRecordExporter.create(); + SdkLoggerProviderBuilder loggerProviderBuilder = + SdkLoggerProvider.builder() + // Default resource used for demonstration purposes + .setResource(Resource.getDefault()) + // In-memory exporter used for demonstration purposes + .addLogRecordProcessor(SimpleLogRecordProcessor.create(exporter)); + // Disable loggerB + SdkLoggerProviderUtil.addLoggerConfiguratorCondition( + loggerProviderBuilder, nameEquals("loggerB"), disabled()); + SdkLoggerProvider loggerProvider = loggerProviderBuilder.build(); + + // Create loggerA and loggerB + ExtendedLogger loggerA = (ExtendedLogger) loggerProvider.get("loggerA"); + ExtendedLogger loggerB = (ExtendedLogger) loggerProvider.get("loggerB"); + + // Check if logger is enabled before emitting log and avoid unnecessary computation + if (loggerA.isEnabled()) { + loggerA + .logRecordBuilder() + .setBody("hello world!") + .setAllAttributes(Attributes.builder().put("result", flipCoin()).build()) + .emit(); + } + if (loggerB.isEnabled()) { + loggerB + .logRecordBuilder() + .setBody("hello world!") + .setAllAttributes(Attributes.builder().put("result", flipCoin()).build()) + .emit(); + } + + // loggerA is enabled, loggerB is disabled + assertThat(loggerA.isEnabled()).isTrue(); + assertThat(loggerB.isEnabled()).isFalse(); + + // Collected data only consists of logs from loggerA. Note, loggerB's logs would be + // omitted from the results even if logs were emitted. The check if enabled simply avoids + // unnecessary computation. + assertThat(exporter.getFinishedLogRecordItems()) + .allSatisfy( + logRecordData -> + assertThat(logRecordData.getInstrumentationScopeInfo().getName()) + .isEqualTo("loggerA")); + } + + private static final Random random = new Random(); + + private static String flipCoin() { + return random.nextBoolean() ? 
"heads" : "tails"; + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/metrics/ExtendedDefaultMeterTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/metrics/ExtendedDefaultMeterTest.java new file mode 100644 index 00000000000..a56a8740b04 --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/metrics/ExtendedDefaultMeterTest.java @@ -0,0 +1,70 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.api.testing.internal.AbstractDefaultMeterTest; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +class ExtendedDefaultMeterTest extends AbstractDefaultMeterTest { + + @Override + protected Meter getMeter() { + return ExtendedDefaultMeter.getNoop(); + } + + @Override + protected MeterProvider getMeterProvider() { + return ExtendedDefaultMeterProvider.getNoop(); + } + + @Test + public void incubatingApiIsLoaded() { + Meter meter = MeterProvider.noop().get("test"); + assertThat(meter).isSameAs(OpenTelemetry.noop().getMeter("test")); + + Assertions.assertThat(meter.gaugeBuilder("test").ofLongs()) + .isInstanceOf(ExtendedLongGaugeBuilder.class); + Assertions.assertThat(meter.gaugeBuilder("test").ofLongs().build()) + .isInstanceOf(ExtendedLongGauge.class); + Assertions.assertThat(meter.gaugeBuilder("test")) + .isInstanceOf(ExtendedDoubleGaugeBuilder.class); + Assertions.assertThat(meter.gaugeBuilder("test").build()) + .isInstanceOf(ExtendedDoubleGauge.class); + + Assertions.assertThat(meter.histogramBuilder("test").ofLongs()) + .isInstanceOf(ExtendedLongHistogramBuilder.class); + Assertions.assertThat(meter.histogramBuilder("test").ofLongs().build()) + .isInstanceOf(ExtendedLongHistogram.class); + Assertions.assertThat(meter.histogramBuilder("test")) + .isInstanceOf(ExtendedDoubleHistogramBuilder.class); + Assertions.assertThat(meter.histogramBuilder("test").build()) + .isInstanceOf(ExtendedDoubleHistogram.class); + + Assertions.assertThat(meter.counterBuilder("test")) + .isInstanceOf(ExtendedLongCounterBuilder.class); + Assertions.assertThat(meter.counterBuilder("test").build()) + .isInstanceOf(ExtendedLongCounter.class); + Assertions.assertThat(meter.counterBuilder("test").ofDoubles()) + .isInstanceOf(ExtendedDoubleCounterBuilder.class); + Assertions.assertThat(meter.counterBuilder("test").ofDoubles().build()) + .isInstanceOf(ExtendedDoubleCounter.class); + + Assertions.assertThat(meter.upDownCounterBuilder("test")) + .isInstanceOf(ExtendedLongUpDownCounterBuilder.class); + Assertions.assertThat(meter.upDownCounterBuilder("test").build()) + .isInstanceOf(ExtendedLongUpDownCounter.class); + Assertions.assertThat(meter.upDownCounterBuilder("test").ofDoubles()) + .isInstanceOf(ExtendedDoubleUpDownCounterBuilder.class); + Assertions.assertThat(meter.upDownCounterBuilder("test").ofDoubles().build()) + .isInstanceOf(ExtendedDoubleUpDownCounter.class); + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/metrics/ExtendedMetricsApiUsageTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/metrics/ExtendedMetricsApiUsageTest.java new file mode 100644 index 00000000000..09cda254e18 --- /dev/null +++ 
b/api/incubator/src/test/java/io/opentelemetry/api/incubator/metrics/ExtendedMetricsApiUsageTest.java @@ -0,0 +1,150 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.metrics; + +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameEquals; +import static io.opentelemetry.sdk.metrics.internal.MeterConfig.disabled; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.sdk.metrics.InstrumentSelector; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; +import io.opentelemetry.sdk.metrics.View; +import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; +import java.util.Random; +import org.junit.jupiter.api.Test; + +/** Demonstrating usage of extended Metrics API. */ +class ExtendedMetricsApiUsageTest { + + @Test + void meterEnabled() { + // Setup SdkMeterProvider + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProviderBuilder meterProviderBuilder = + SdkMeterProvider.builder() + // Default resource used for demonstration purposes + .setResource(Resource.getDefault()) + // In-memory reader used for demonstration purposes + .registerMetricReader(reader); + // Disable meterB + SdkMeterProviderUtil.addMeterConfiguratorCondition( + meterProviderBuilder, nameEquals("meterB"), disabled()); + SdkMeterProvider meterProvider = meterProviderBuilder.build(); + + // Create meterA and meterB, and corresponding instruments + Meter meterA = meterProvider.get("meterA"); + Meter meterB = meterProvider.get("meterB"); + ExtendedDoubleHistogram histogramA = + (ExtendedDoubleHistogram) meterA.histogramBuilder("histogramA").build(); + ExtendedDoubleHistogram histogramB = + (ExtendedDoubleHistogram) meterB.histogramBuilder("histogramB").build(); + + // Check if instrument is enabled before recording measurement and avoid unnecessary computation + if (histogramA.isEnabled()) { + histogramA.record(1.0, Attributes.builder().put("result", flipCoin()).build()); + } + if (histogramB.isEnabled()) { + histogramB.record(1.0, Attributes.builder().put("result", flipCoin()).build()); + } + + // histogramA is enabled since meterA is enabled, histogramB is disabled since meterB is + // disabled + assertThat(histogramA.isEnabled()).isTrue(); + assertThat(histogramB.isEnabled()).isFalse(); + + // Collected data only consists of metrics from meterA. Note, meterB's histogramB would be + // omitted from the results even if values were recorded. The check if enabled simply avoids + // unnecessary computation. + assertThat(reader.collectAllMetrics()) + .allSatisfy( + metric -> + assertThat(metric.getInstrumentationScopeInfo().getName()).isEqualTo("meterA")); + } + + private static final Random random = new Random(); + + private static String flipCoin() { + return random.nextBoolean() ? 
"heads" : "tails"; + } + + @Test + void attributesAdvice() { + // Setup SdkMeterProvider + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider meterProvider = + SdkMeterProvider.builder() + // Default resource used for demonstration purposes + .setResource(Resource.getDefault()) + // In-memory reader used for demonstration purposes + .registerMetricReader(reader) + // Register a view which indicates that for counter1, attributes key1, key2 should be + // retained + .registerView( + InstrumentSelector.builder().setName("counter1").build(), + View.builder().setAttributeFilter(ImmutableSet.of("key1", "key2")).build()) + .build(); + + // Get a Meter for a scope + Meter meter = meterProvider.get("org.foo.my-scope"); + + // To apply attribute advice, cast the instrument builder to appropriate + // Extended{Instrument}Builder, and call setAttributeAdvice + // Here we create counter1 and counter2, both configured to only retain attribute key1. counter1 + // has a view configured which overrides this and retains key1, key2. + LongCounter counter1 = + ((ExtendedLongCounterBuilder) meter.counterBuilder("counter1")) + .setAttributesAdvice(ImmutableList.of(AttributeKey.stringKey("key1"))) + .build(); + LongCounter counter2 = + ((ExtendedLongCounterBuilder) meter.counterBuilder("counter2")) + .setAttributesAdvice(ImmutableList.of(AttributeKey.stringKey("key1"))) + .build(); + + // Record data with attribute key1, key2 + counter1.add(1, Attributes.builder().put("key1", "value1").put("key2", "value2").build()); + counter2.add(1, Attributes.builder().put("key1", "value1").put("key2", "value2").build()); + + // Verify that counter1 has both key1, key2 since view overrides the attribute advice + // Verify that counter2 only has key1, since attribute advice causes key2 to be dropped by + // default + assertThat(reader.collectAllMetrics()) + .satisfiesExactlyInAnyOrder( + metricData -> + assertThat(metricData) + .hasName("counter1") + .hasLongSumSatisfying( + sumAssert -> + sumAssert.hasPointsSatisfying( + point -> + point + .hasValue(1L) + .hasAttributes( + Attributes.builder() + .put("key1", "value1") + .put("key2", "value2") + .build()))), + metricData -> + assertThat(metricData) + .hasName("counter2") + .hasLongSumSatisfying( + sumAssert -> + sumAssert.hasPointsSatisfying( + point -> + point + .hasValue(1L) + .hasAttributes( + Attributes.builder().put("key1", "value1").build())))); + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/propagation/CaseInsensitiveMapTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/propagation/CaseInsensitiveMapTest.java new file mode 100644 index 00000000000..ac9feee91b4 --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/propagation/CaseInsensitiveMapTest.java @@ -0,0 +1,69 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.propagation; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; + +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.Test; + +class CaseInsensitiveMapTest { + + @Test + void createByConstructor() { + Map map = new HashMap<>(); + map.put("Key1", "test"); + map.put("Key2", "test2"); + + CaseInsensitiveMap caseInsensitiveMap = new CaseInsensitiveMap(map); + + Map standardMap = new HashMap<>(); + standardMap.put("key1", "test"); + standardMap.put("key2", "test2"); + + 
assertThat(caseInsensitiveMap).isEqualTo(standardMap); + } + + @Test + void putAll() { + CaseInsensitiveMap caseInsensitiveMap = new CaseInsensitiveMap(); + Map standardMap = new HashMap<>(); + standardMap.put("key1", "test"); + standardMap.put("key2", "test2"); + caseInsensitiveMap.putAll(standardMap); + assertThat(caseInsensitiveMap).isEqualTo(standardMap); + } + + @Test + void putIfAbsent() { + CaseInsensitiveMap caseInsensitiveMap = new CaseInsensitiveMap(); + caseInsensitiveMap.putIfAbsent("key1", "test"); + assertThat(caseInsensitiveMap.get("KEY1")).isEqualTo("test"); + caseInsensitiveMap.putIfAbsent("key1", "nope"); + assertThat(caseInsensitiveMap.get("KEY1")).isEqualTo("test"); + } + + @Test + void createByConstructorWithNullMap() { + CaseInsensitiveMap caseInsensitiveMap = new CaseInsensitiveMap(null); + assertThat(caseInsensitiveMap).isEmpty(); + } + + @Test + void caseInsensitivity() { + CaseInsensitiveMap caseInsensitiveMap = new CaseInsensitiveMap(null); + + assertThat(caseInsensitiveMap).isEmpty(); + + caseInsensitiveMap.put("KEY1", "test1"); + caseInsensitiveMap.put("KEY2", "test2"); + assertThat(caseInsensitiveMap.get("key1")).isEqualTo("test1"); + assertThat(caseInsensitiveMap.get("key2")).isEqualTo("test2"); + assertThat(caseInsensitiveMap.get("kEy2")).isEqualTo("test2"); + assertThat(caseInsensitiveMap.get("KEY2")).isEqualTo("test2"); + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/propagation/ExtendedContextPropagatorsUsageTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/propagation/ExtendedContextPropagatorsUsageTest.java new file mode 100644 index 00000000000..d3bacb21483 --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/propagation/ExtendedContextPropagatorsUsageTest.java @@ -0,0 +1,76 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.propagation; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; + +import com.google.common.collect.ImmutableMap; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.util.Map; +import org.junit.jupiter.api.Test; + +/** Demonstrating usage of extended ContextPropagators API. 
*/ +class ExtendedContextPropagatorsUsageTest { + + @Test + void getTextMapPropagationContextUsage() { + // Setup Propagators + ContextPropagators propagators = + ContextPropagators.create( + TextMapPropagator.composite(W3CTraceContextPropagator.getInstance())); + + // Setup SdkTracerProvider + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder().setSampler(Sampler.alwaysOn()).build(); + + // Get a Tracer for a scope + Tracer tracer = tracerProvider.get("org.foo.my-scope"); + + try (Scope scope = tracer.spanBuilder("span name").startSpan().makeCurrent()) { + // Simplify context injection by getting a text map of the key/value pairs to inject + Map textMap = + ExtendedContextPropagators.getTextMapPropagationContext(propagators); + // Assert textmap contains the "traceparent" field as injected by W3CTraceContextPropagator + assertThat(textMap) + .hasEntrySatisfying("traceparent", value -> assertThat(value).isNotEmpty()); + } + } + + @Test + void extractTextMapPropagationContextUsage() { + // Setup Propagators + ContextPropagators propagators = + ContextPropagators.create( + TextMapPropagator.composite(W3CTraceContextPropagator.getInstance())); + + // Setup map with context key/value pairs + Map contextCarrier = + ImmutableMap.of("traceparent", "00-713bde54561be5ded62545d0e7369d4a-3c3a5ddefce9c1e1-01"); + + // Extract context from the carrier map + Context context = + ExtendedContextPropagators.extractTextMapPropagationContext(contextCarrier, propagators); + // Assert SpanContext is properly extracted from the W3cTraceContextPropagator + assertThat(Span.fromContext(context).getSpanContext()) + .isEqualTo( + SpanContext.createFromRemoteParent( + "713bde54561be5ded62545d0e7369d4a", + "3c3a5ddefce9c1e1", + TraceFlags.getSampled(), + TraceState.getDefault())); + } +} diff --git a/extensions/incubator/src/test/java/io/opentelemetry/extension/incubator/propagation/PassThroughPropagatorTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/propagation/PassThroughPropagatorTest.java similarity index 98% rename from extensions/incubator/src/test/java/io/opentelemetry/extension/incubator/propagation/PassThroughPropagatorTest.java rename to api/incubator/src/test/java/io/opentelemetry/api/incubator/propagation/PassThroughPropagatorTest.java index de93164c004..008694252b8 100644 --- a/extensions/incubator/src/test/java/io/opentelemetry/extension/incubator/propagation/PassThroughPropagatorTest.java +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/propagation/PassThroughPropagatorTest.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.extension.incubator.propagation; +package io.opentelemetry.api.incubator.propagation; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracerTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracerTest.java new file mode 100644 index 00000000000..791c6b7d47c --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/trace/ExtendedDefaultTracerTest.java @@ -0,0 +1,65 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.trace; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.OpenTelemetry; +import 
io.opentelemetry.api.testing.internal.AbstractDefaultTracerTest; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.TracerProvider; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +class ExtendedDefaultTracerTest extends AbstractDefaultTracerTest { + + @Override + public Tracer getTracer() { + return ExtendedDefaultTracer.getNoop(); + } + + @Override + public TracerProvider getTracerProvider() { + return ExtendedDefaultTracerProvider.getNoop(); + } + + @Test + public void incubatingApiIsLoaded() { + Tracer tracer = TracerProvider.noop().get("test"); + assertThat(tracer).isSameAs(OpenTelemetry.noop().getTracer("test")); + + assertThat(tracer).isInstanceOf(ExtendedTracer.class); + assertThat(tracer.spanBuilder("test")).isInstanceOf(ExtendedSpanBuilder.class); + } + + @SuppressWarnings("unchecked") + @Test + public void incubatingApi() { + ExtendedSpanBuilder spanBuilder = + (ExtendedSpanBuilder) ExtendedDefaultTracer.getNoop().spanBuilder("test"); + assertThat(spanBuilder.setParentFrom(null, null)).isSameAs(spanBuilder); + + SpanRunnable spanRunnable = Mockito.mock(SpanRunnable.class); + + spanBuilder.startAndRun(spanRunnable); + Mockito.verify(spanRunnable).runInSpan(); + Mockito.reset(spanRunnable); + + spanBuilder.startAndRun(spanRunnable, null); + Mockito.verify(spanRunnable).runInSpan(); + Mockito.reset(spanRunnable); + + SpanCallable spanCallable = Mockito.mock(SpanCallable.class); + + spanBuilder.startAndCall(spanCallable); + Mockito.verify(spanCallable).callInSpan(); + Mockito.reset(spanCallable); + + spanBuilder.startAndCall(spanCallable, null); + Mockito.verify(spanCallable).callInSpan(); + Mockito.reset(spanCallable); + } +} diff --git a/api/incubator/src/test/java/io/opentelemetry/api/incubator/trace/ExtendedTraceApiUsageTest.java b/api/incubator/src/test/java/io/opentelemetry/api/incubator/trace/ExtendedTraceApiUsageTest.java new file mode 100644 index 00000000000..752cd279650 --- /dev/null +++ b/api/incubator/src/test/java/io/opentelemetry/api/incubator/trace/ExtendedTraceApiUsageTest.java @@ -0,0 +1,279 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.incubator.trace; + +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameEquals; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.trace.internal.TracerConfig.disabled; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.IdGenerator; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder; +import io.opentelemetry.sdk.trace.data.StatusData; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.internal.SdkTracerProviderUtil; +import java.util.HashMap; +import 
java.util.Map; +import java.util.Random; +import java.util.function.BiConsumer; +import org.junit.jupiter.api.Test; + +/** Demonstrating usage of extended Trace API. */ +class ExtendedTraceApiUsageTest { + + @Test + void tracerEnabled() { + // Setup SdkTracerProvider + InMemorySpanExporter exporter = InMemorySpanExporter.create(); + SdkTracerProviderBuilder tracerProviderBuilder = + SdkTracerProvider.builder() + // Default resource used for demonstration purposes + .setResource(Resource.getDefault()) + // In-memory exporter used for demonstration purposes + .addSpanProcessor(SimpleSpanProcessor.create(exporter)); + // Disable tracerB + SdkTracerProviderUtil.addTracerConfiguratorCondition( + tracerProviderBuilder, nameEquals("tracerB"), disabled()); + SdkTracerProvider tracerProvider = tracerProviderBuilder.build(); + + // Create tracerA and tracerB + ExtendedTracer tracerA = (ExtendedTracer) tracerProvider.get("tracerA"); + ExtendedTracer tracerB = (ExtendedTracer) tracerProvider.get("tracerB"); + + // Check if tracer is enabled before recording span and avoid unnecessary computation + if (tracerA.isEnabled()) { + tracerA + .spanBuilder("span name") + .startSpan() + .setAllAttributes(Attributes.builder().put("result", flipCoin()).build()) + .end(); + } + if (tracerB.isEnabled()) { + tracerB + .spanBuilder("span name") + .startSpan() + .setAllAttributes(Attributes.builder().put("result", flipCoin()).build()) + .end(); + } + + // tracerA is enabled, tracerB is disabled + assertThat(tracerA.isEnabled()).isTrue(); + assertThat(tracerB.isEnabled()).isFalse(); + + // Collected data only consists of spans from tracerA. Note, tracerB's spans would be + // omitted from the results even if spans were recorded. The check if enabled simply avoids + // unnecessary computation. + assertThat(exporter.getFinishedSpanItems()) + .allSatisfy( + spanData -> + assertThat(spanData.getInstrumentationScopeInfo().getName()).isEqualTo("tracerA")); + } + + private static final Random random = new Random(); + + private static String flipCoin() { + return random.nextBoolean() ? "heads" : "tails"; + } + + /** Demonstrates {@link ExtendedSpanBuilder#setParentFrom(ContextPropagators, Map)}. 
*/ + @Test + void setParentFrom() { + // Setup SdkTracerProvider + InMemorySpanExporter spanExporter = InMemorySpanExporter.create(); + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + // Default resource used for demonstration purposes + .setResource(Resource.getDefault()) + // SimpleSpanProcessor with InMemorySpanExporter used for demonstration purposes + .addSpanProcessor(SimpleSpanProcessor.create(spanExporter)) + .build(); + + // Setup ContextPropagators + ContextPropagators contextPropagators = + ContextPropagators.create( + TextMapPropagator.composite(W3CTraceContextPropagator.getInstance())); + + // Get a Tracer for a scope + Tracer tracer = tracerProvider.get("org.foo.my-scope"); + + // Populate a map with W3C trace context + Map contextCarrier = new HashMap<>(); + SpanContext remoteParentContext = + SpanContext.createFromRemoteParent( + IdGenerator.random().generateTraceId(), + IdGenerator.random().generateSpanId(), + TraceFlags.getSampled(), + TraceState.getDefault()); + W3CTraceContextPropagator.getInstance() + .inject( + Context.current().with(Span.wrap(remoteParentContext)), + contextCarrier, + (carrier, key, value) -> { + if (carrier != null) { + carrier.put(key, value); + } + }); + + // Set parent from the Map context carrier + ((ExtendedSpanBuilder) tracer.spanBuilder("local_root")) + .setParentFrom(contextPropagators, contextCarrier) + .startSpan() + .end(); + + // Verify the span has the correct parent context + assertThat(spanExporter.getFinishedSpanItems()) + .satisfiesExactly( + span -> + assertThat(span) + .hasName("local_root") + .hasParentSpanId(remoteParentContext.getSpanId()) + .hasTraceId(remoteParentContext.getTraceId())); + } + + /** + * Demonstrates {@link ExtendedSpanBuilder#startAndCall(SpanCallable)}, {@link + * ExtendedSpanBuilder#startAndCall(SpanCallable, BiConsumer)}, {@link + * ExtendedSpanBuilder#startAndRun(SpanRunnable)}, {@link + * ExtendedSpanBuilder#startAndRun(SpanRunnable, BiConsumer)}. 
+ */ + @Test + void startAndCallOrRun() { + // Setup SdkTracerProvider + InMemorySpanExporter spanExporter = InMemorySpanExporter.create(); + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + // Default resource used for demonstration purposes + .setResource(Resource.getDefault()) + // SimpleSpanProcessor with InMemorySpanExporter used for demonstration purposes + .addSpanProcessor(SimpleSpanProcessor.create(spanExporter)) + .build(); + + // Get a Tracer for a scope + Tracer tracer = tracerProvider.get("org.foo.my-scope"); + ExtendedTracer extendedTracer = (ExtendedTracer) tracer; + + // Wrap the resetCheckout method in a span + String cartId = + ((ExtendedSpanBuilder) tracer.spanBuilder("reset_checkout_and_return")) + .setAttribute("key123", "val456") + .startAndCall(() -> resetCheckoutAndReturn("abc123", /* throwException= */ false)); + assertThat(cartId).isEqualTo("abc123"); + // ...or use ExtendedTracer instance + // ...or runnable variation + extendedTracer + .spanBuilder("reset_checkout") + .startAndRun(() -> resetCheckout("abc123", /* throwException= */ false)); + + // Wrap the resetCheckout method in a span; resetCheckout throws an exception + try { + extendedTracer + .spanBuilder("reset_checkout_and_return") + .startAndCall(() -> resetCheckoutAndReturn("def456", /* throwException= */ true)); + } catch (Throwable e) { + // Ignore expected exception + } + // ...or runnable variation + try { + extendedTracer + .spanBuilder("reset_checkout") + .startAndRun(() -> resetCheckout("def456", /* throwException= */ true)); + } catch (Throwable e) { + // Ignore expected exception + } + + // Wrap the resetCheckout method in a span; resetCheckout throws an exception; use custom error + // handler + try { + extendedTracer + .spanBuilder("reset_checkout_and_return") + .startAndCall( + () -> resetCheckoutAndReturn("ghi789", /* throwException= */ true), + (span, throwable) -> span.setAttribute("my-attribute", "error")); + } catch (Throwable e) { + // Ignore expected exception + } + // ...or runnable variation + try { + extendedTracer + .spanBuilder("reset_checkout") + .startAndRun( + () -> resetCheckout("ghi789", /* throwException= */ true), + (span, throwable) -> span.setAttribute("my-attribute", "error")); + } catch (Throwable e) { + // Ignore expected exception + } + + // Verify the spans are as expected + assertThat(spanExporter.getFinishedSpanItems()) + .satisfiesExactly( + span -> + assertThat(span) + .hasName("reset_checkout_and_return") + .hasAttribute(AttributeKey.stringKey("cartId"), "abc123") + .hasStatus(StatusData.unset()) + .hasTotalRecordedEvents(0), + span -> + assertThat(span) + .hasName("reset_checkout") + .hasAttribute(AttributeKey.stringKey("cartId"), "abc123") + .hasStatus(StatusData.unset()) + .hasTotalRecordedEvents(0), + span -> + assertThat(span) + .hasName("reset_checkout_and_return") + .hasAttribute(AttributeKey.stringKey("cartId"), "def456") + .hasStatus(StatusData.error()) + .hasEventsSatisfyingExactly(event -> event.hasName("exception")), + span -> + assertThat(span) + .hasName("reset_checkout") + .hasAttribute(AttributeKey.stringKey("cartId"), "def456") + .hasStatus(StatusData.error()) + .hasEventsSatisfyingExactly(event -> event.hasName("exception")), + span -> + assertThat(span) + .hasName("reset_checkout_and_return") + .hasAttribute(AttributeKey.stringKey("cartId"), "ghi789") + .hasAttribute(AttributeKey.stringKey("my-attribute"), "error") + .hasStatus(StatusData.unset()) + .hasTotalRecordedEvents(0), + span -> + assertThat(span) + 
.hasName("reset_checkout") + .hasAttribute(AttributeKey.stringKey("cartId"), "ghi789") + .hasAttribute(AttributeKey.stringKey("my-attribute"), "error") + .hasStatus(StatusData.unset()) + .hasTotalRecordedEvents(0)); + } + + private static String resetCheckoutAndReturn(String cartId, boolean throwException) { + Span.current().setAttribute("cartId", cartId); + if (throwException) { + throw new RuntimeException("Error!"); + } + return cartId; + } + + private static void resetCheckout(String cartId, boolean throwException) { + Span.current().setAttribute("cartId", cartId); + if (throwException) { + throw new RuntimeException("Error!"); + } + } +} diff --git a/api/testing-internal/build.gradle.kts b/api/testing-internal/build.gradle.kts new file mode 100644 index 00000000000..b3e79cf0cfe --- /dev/null +++ b/api/testing-internal/build.gradle.kts @@ -0,0 +1,21 @@ +plugins { + id("otel.java-conventions") +} + +description = "OpenTelemetry API Testing (Internal)" +otelJava.moduleName.set("io.opentelemetry.api.testing.internal") + +dependencies { + api(project(":api:all")) + + implementation(project(":testing-internal")) + + implementation("com.linecorp.armeria:armeria-junit5") + implementation("org.assertj:assertj-core") + implementation("org.mockito:mockito-core") +} + +// Skip OWASP dependencyCheck task on test module +dependencyCheck { + skip = true +} diff --git a/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractDefaultLoggerTest.java b/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractDefaultLoggerTest.java new file mode 100644 index 00000000000..4b38e6fab19 --- /dev/null +++ b/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractDefaultLoggerTest.java @@ -0,0 +1,70 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.testing.internal; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.api.logs.LoggerProvider; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.context.Context; +import java.time.Instant; +import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.Test; + +/** Unit tests for No-op {@link Logger}. 
*/ +public abstract class AbstractDefaultLoggerTest { + + protected abstract LoggerProvider getLoggerProvider(); + + protected abstract Logger getLogger(); + + @Test + void noopLoggerProvider_doesNotThrow() { + LoggerProvider provider = LoggerProvider.noop(); + + assertThat(provider).isSameAs(getLoggerProvider()); + assertThatCode(() -> provider.get("scope-name")).doesNotThrowAnyException(); + assertThatCode( + () -> + provider + .loggerBuilder("scope-name") + .setInstrumentationVersion("1.0") + .setSchemaUrl("http://schema.com") + .build()) + .doesNotThrowAnyException(); + + assertThatCode(() -> provider.loggerBuilder("scope-name").build().logRecordBuilder()) + .doesNotThrowAnyException(); + } + + @Test + void buildAndEmit() { + assertThatCode( + () -> + getLogger() + .logRecordBuilder() + // TODO (trask) once event name stabilizes + // .setEventName("event name") + .setTimestamp(100, TimeUnit.SECONDS) + .setTimestamp(Instant.now()) + .setObservedTimestamp(100, TimeUnit.SECONDS) + .setObservedTimestamp(Instant.now()) + .setContext(Context.root()) + .setSeverity(Severity.DEBUG) + .setSeverityText("debug") + .setBody("body") + .setBody(Value.of("body")) + .setAttribute(AttributeKey.stringKey("key1"), "value1") + .setAllAttributes(Attributes.builder().put("key2", "value2").build()) + .emit()) + .doesNotThrowAnyException(); + } +} diff --git a/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractDefaultMeterTest.java b/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractDefaultMeterTest.java new file mode 100644 index 00000000000..b96de557c42 --- /dev/null +++ b/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractDefaultMeterTest.java @@ -0,0 +1,264 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.testing.internal; + +import static io.opentelemetry.api.common.AttributeKey.stringKey; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleCounter; +import io.opentelemetry.api.metrics.DoubleGauge; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.DoubleUpDownCounter; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongGauge; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; +import io.opentelemetry.api.metrics.ObservableLongMeasurement; +import io.opentelemetry.context.Context; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; +import org.junit.jupiter.api.Test; + +/** Unit tests for No-op {@link Meter}. 
*/ +@SuppressLogger() +public abstract class AbstractDefaultMeterTest { + private final Meter meter = getMeter(); + + protected abstract Meter getMeter(); + + protected abstract MeterProvider getMeterProvider(); + + @Test + void noopMeterProvider_getDoesNotThrow() { + MeterProvider provider = getMeterProvider(); + provider.get("user-instrumentation"); + } + + @Test + void noopMeterProvider_builderDoesNotThrow() { + MeterProvider provider = getMeterProvider(); + provider.meterBuilder("user-instrumentation").build(); + provider.meterBuilder("advanced-instrumetnation").setInstrumentationVersion("1.0").build(); + provider.meterBuilder("schema-instrumentation").setSchemaUrl("myschema://url").build(); + provider + .meterBuilder("schema-instrumentation") + .setInstrumentationVersion("1.0") + .setSchemaUrl("myschema://url") + .build(); + } + + @Test + void noopLongCounter_doesNotThrow() { + LongCounter counter = + meter.counterBuilder("size").setDescription("The size I'm measuring").setUnit("1").build(); + counter.add(1); + counter.add(1, Attributes.of(stringKey("thing"), "car")); + counter.add(1, Attributes.of(stringKey("thing"), "car"), Context.current()); + } + + @Test + void noopDoubleCounter_doesNotThrow() { + DoubleCounter counter = + meter + .counterBuilder("size") + .ofDoubles() + .setDescription("The size I'm measuring") + .setUnit("1") + .build(); + counter.add(1.2); + counter.add(2.5, Attributes.of(stringKey("thing"), "car")); + counter.add(2.5, Attributes.of(stringKey("thing"), "car"), Context.current()); + } + + @Test + void noopLongUpDownCounter_doesNotThrow() { + LongUpDownCounter counter = + meter + .upDownCounterBuilder("size") + .setDescription("The size I'm measuring") + .setUnit("1") + .build(); + counter.add(-1); + counter.add(1, Attributes.of(stringKey("thing"), "car")); + counter.add(1, Attributes.of(stringKey("thing"), "car"), Context.current()); + } + + @Test + void noopDoubleUpDownCounter_doesNotThrow() { + DoubleUpDownCounter counter = + meter + .upDownCounterBuilder("size") + .ofDoubles() + .setDescription("The size I'm measuring") + .setUnit("1") + .build(); + counter.add(-2e4); + counter.add(1.0e-1, Attributes.of(stringKey("thing"), "car")); + counter.add(1.0e-1, Attributes.of(stringKey("thing"), "car"), Context.current()); + } + + @Test + void noopLongHistogram_doesNotThrow() { + LongHistogram histogram = + meter + .histogramBuilder("size") + .ofLongs() + .setDescription("The size I'm measuring") + .setUnit("1") + .build(); + histogram.record(-1); + histogram.record(1, Attributes.of(stringKey("thing"), "car")); + histogram.record(1, Attributes.of(stringKey("thing"), "car"), Context.current()); + } + + @Test + void noopDoubleHistogram_doesNotThrow() { + DoubleHistogram histogram = + meter + .histogramBuilder("size") + .setDescription("The size I'm measuring") + .setUnit("1") + .build(); + histogram.record(-2e4); + histogram.record(1.0e-1, Attributes.of(stringKey("thing"), "car")); + histogram.record(1.0e-1, Attributes.of(stringKey("thing"), "car"), Context.current()); + } + + @Test + void noopLongGauage_doesNotThrow() { + LongGauge gauge = + meter + .gaugeBuilder("temperature") + .ofLongs() + .setDescription("The current temperature") + .setUnit("C") + .build(); + gauge.set(1); + gauge.set(2, Attributes.of(stringKey("thing"), "engine")); + gauge.set(2, Attributes.of(stringKey("thing"), "engine"), Context.current()); + + ObservableLongMeasurement measurement = + meter + .gaugeBuilder("temperature") + .ofLongs() + .setDescription("The current temperature") + 
.setUnit("C") + .buildObserver(); + measurement.record(1); + measurement.record(1, Attributes.of(stringKey("thing"), "engine")); + } + + @Test + void noopObservableLongGauage_doesNotThrow() { + meter + .gaugeBuilder("temperature") + .ofLongs() + .setDescription("The current temperature") + .setUnit("C") + .buildWithCallback( + m -> { + m.record(1); + m.record(2, Attributes.of(stringKey("thing"), "engine")); + }); + } + + @Test + void noopDoubleGauage_doesNotThrow() { + DoubleGauge gauge = + meter + .gaugeBuilder("temperature") + .setDescription("The current temperature") + .setUnit("C") + .build(); + gauge.set(1); + gauge.set(2, Attributes.of(stringKey("thing"), "engine")); + gauge.set(2, Attributes.of(stringKey("thing"), "engine"), Context.current()); + + ObservableDoubleMeasurement measurement = + meter + .gaugeBuilder("temperature") + .setDescription("The current temperature") + .setUnit("C") + .buildObserver(); + measurement.record(1.0); + measurement.record(1.0, Attributes.of(stringKey("thing"), "engine")); + } + + @Test + void noopObservableDoubleGauage_doesNotThrow() { + meter + .gaugeBuilder("temperature") + .setDescription("The current temperature") + .setUnit("C") + .buildWithCallback( + m -> { + m.record(1.0e1); + m.record(-27.4, Attributes.of(stringKey("thing"), "engine")); + }); + } + + @Test + void noopObservableLongCounter_doesNotThrow() { + meter + .counterBuilder("temperature") + .setDescription("The current temperature") + .setUnit("C") + .buildWithCallback( + m -> { + m.record(1); + m.record(2, Attributes.of(stringKey("thing"), "engine")); + }); + } + + @Test + void noopObservableDoubleCounter_doesNotThrow() { + meter + .counterBuilder("temperature") + .ofDoubles() + .setDescription("The current temperature") + .setUnit("C") + .buildWithCallback( + m -> { + m.record(1.0e1); + m.record(-27.4, Attributes.of(stringKey("thing"), "engine")); + }); + } + + @Test + void noopObservableLongUpDownCounter_doesNotThrow() { + meter + .upDownCounterBuilder("temperature") + .setDescription("The current temperature") + .setUnit("C") + .buildWithCallback( + m -> { + m.record(1); + m.record(2, Attributes.of(stringKey("thing"), "engine")); + }); + } + + @Test + void noopObservableDoubleUpDownCounter_doesNotThrow() { + meter + .upDownCounterBuilder("temperature") + .ofDoubles() + .setDescription("The current temperature") + .setUnit("C") + .buildWithCallback( + m -> { + m.record(1.0e1); + m.record(-27.4, Attributes.of(stringKey("thing"), "engine")); + }); + } + + @Test + @SuppressWarnings("NullAway") + void noopBatchCallback_doesNotThrow() { + meter.batchCallback(() -> {}, null); + } +} diff --git a/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractDefaultTracerTest.java b/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractDefaultTracerTest.java new file mode 100644 index 00000000000..c03f877f8a6 --- /dev/null +++ b/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractDefaultTracerTest.java @@ -0,0 +1,164 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.testing.internal; + +import static io.opentelemetry.api.common.AttributeKey.longKey; +import static io.opentelemetry.api.common.AttributeKey.stringKey; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import 
io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanBuilder; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.TracerProvider; +import io.opentelemetry.context.Context; +import java.time.Instant; +import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.Test; + +/** Unit tests for No-op {@link Tracer}. */ +// Need to suppress warnings for MustBeClosed because Android 14 does not support +// try-with-resources. +@SuppressWarnings("MustBeClosedChecker") +public abstract class AbstractDefaultTracerTest { + private final Tracer defaultTracer = getTracer(); + private static final String SPAN_NAME = "MySpanName"; + private static final SpanContext spanContext = + SpanContext.create( + "00000000000000000000000000000061", + "0000000000000061", + TraceFlags.getDefault(), + TraceState.getDefault()); + + public abstract Tracer getTracer(); + + public abstract TracerProvider getTracerProvider(); + + @Test + void returnsDefaultTracer() { + TracerProvider tracerProvider = getTracerProvider(); + Class want = defaultTracer.getClass(); + assertThat( + tracerProvider + .tracerBuilder("test") + .setSchemaUrl("schema") + .setInstrumentationVersion("1") + .build()) + .isInstanceOf(want); + assertThat(tracerProvider.get("test")).isInstanceOf(want); + assertThat(tracerProvider.get("test", "1.0")).isInstanceOf(want); + } + + @Test + void defaultSpanBuilderWithName() { + assertThat(defaultTracer.spanBuilder(SPAN_NAME).startSpan().getSpanContext().isValid()) + .isFalse(); + } + + @Test + @SuppressWarnings("NullAway") + void spanContextPropagationExplicitParent() { + assertThat( + defaultTracer + .spanBuilder(SPAN_NAME) + .setParent(Context.root().with(Span.wrap(spanContext))) + .startSpan() + .getSpanContext()) + .isSameAs(spanContext); + + SpanBuilder builder = defaultTracer.spanBuilder(SPAN_NAME); + assertThat(builder.setParent(null)).isSameAs(builder); + } + + @Test + void spanContextPropagation() { + Span parent = Span.wrap(spanContext); + + Span span = + defaultTracer.spanBuilder(SPAN_NAME).setParent(Context.root().with(parent)).startSpan(); + assertThat(span.getSpanContext()).isSameAs(spanContext); + } + + @Test + void noSpanContextMakesInvalidSpans() { + Span span = defaultTracer.spanBuilder(SPAN_NAME).startSpan(); + assertThat(span.getSpanContext()).isSameAs(SpanContext.getInvalid()); + } + + @Test + void spanContextPropagation_fromContext() { + Context context = Context.current().with(Span.wrap(spanContext)); + + Span span = defaultTracer.spanBuilder(SPAN_NAME).setParent(context).startSpan(); + assertThat(span.getSpanContext()).isSameAs(spanContext); + } + + @Test + void spanContextPropagation_fromContextAfterNoParent() { + Context context = Context.current().with(Span.wrap(spanContext)); + + Span span = defaultTracer.spanBuilder(SPAN_NAME).setNoParent().setParent(context).startSpan(); + assertThat(span.getSpanContext()).isSameAs(spanContext); + } + + @Test + void spanContextPropagation_fromContextThenNoParent() { + Context context = Context.current().with(Span.wrap(spanContext)); + + Span span = defaultTracer.spanBuilder(SPAN_NAME).setParent(context).setNoParent().startSpan(); + assertThat(span.getSpanContext()).isEqualTo(SpanContext.getInvalid()); + } + + @Test + @SuppressWarnings("NullAway") + void doNotCrash_NoopImplementation() { + assertThatCode( + () -> { + SpanBuilder spanBuilder = 
defaultTracer.spanBuilder(null); + spanBuilder.setSpanKind(null); + spanBuilder.setParent(null); + spanBuilder.setNoParent(); + spanBuilder.addLink(null); + spanBuilder.addLink(null, Attributes.empty()); + spanBuilder.addLink(SpanContext.getInvalid(), null); + spanBuilder.setAttribute((String) null, "foo"); + spanBuilder.setAttribute("foo", null); + spanBuilder.setAttribute(null, 0L); + spanBuilder.setAttribute(null, 0.0); + spanBuilder.setAttribute(null, false); + spanBuilder.setAttribute((AttributeKey) null, "foo"); + spanBuilder.setAttribute(stringKey(null), "foo"); + spanBuilder.setAttribute(stringKey(""), "foo"); + spanBuilder.setAttribute(stringKey("foo"), null); + spanBuilder.setStartTimestamp(-1, TimeUnit.MILLISECONDS); + spanBuilder.setStartTimestamp(1, null); + spanBuilder.setParent(Context.root().with(Span.wrap(null))); + spanBuilder.setParent(Context.root()); + spanBuilder.setNoParent(); + spanBuilder.addLink(Span.getInvalid().getSpanContext()); + spanBuilder.addLink(Span.getInvalid().getSpanContext(), Attributes.empty()); + spanBuilder.setAttribute("key", "value"); + spanBuilder.setAttribute("key", 12345L); + spanBuilder.setAttribute("key", .12345); + spanBuilder.setAttribute("key", true); + spanBuilder.setAttribute(stringKey("key"), "value"); + spanBuilder.setAllAttributes(Attributes.of(stringKey("key"), "value")); + spanBuilder.setAllAttributes(Attributes.empty()); + spanBuilder.setAllAttributes(null); + spanBuilder.setStartTimestamp(12345L, TimeUnit.NANOSECONDS); + spanBuilder.setStartTimestamp(Instant.EPOCH); + spanBuilder.setStartTimestamp(null); + spanBuilder.setAttribute(longKey("MyLongAttributeKey"), 123); + assertThat(spanBuilder.startSpan().getSpanContext().isValid()).isFalse(); + }) + .doesNotThrowAnyException(); + } +} diff --git a/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractOpenTelemetryTest.java b/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractOpenTelemetryTest.java new file mode 100644 index 00000000000..4223d853661 --- /dev/null +++ b/api/testing-internal/src/main/java/io/opentelemetry/api/testing/internal/AbstractOpenTelemetryTest.java @@ -0,0 +1,118 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.api.testing.internal; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.logs.LoggerProvider; +import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.api.trace.TracerProvider; +import io.opentelemetry.context.propagation.ContextPropagators; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +/** Unit tests for No-op {@link OpenTelemetry}. 
*/ +public abstract class AbstractOpenTelemetryTest { + + @BeforeAll + public static void beforeClass() { + GlobalOpenTelemetry.resetForTest(); + } + + private void setOpenTelemetry() { + GlobalOpenTelemetry.set(getOpenTelemetry()); + } + + private static OpenTelemetry getGlobalOpenTelemetry() { + return GlobalOpenTelemetry.get(); + } + + @AfterEach + public void after() { + GlobalOpenTelemetry.resetForTest(); + } + + @Test + void testDefault() { + assertThat(getOpenTelemetry().getTracerProvider()).isSameAs(getTracerProvider()); + assertThat(getOpenTelemetry().getPropagators()).isSameAs(ContextPropagators.noop()); + assertThat(getOpenTelemetry().getMeterProvider()).isSameAs(getMeterProvider()); + assertThat(getOpenTelemetry().getLogsBridge()).isSameAs(getLoggerProvider()); + } + + protected abstract TracerProvider getTracerProvider(); + + protected OpenTelemetry getOpenTelemetry() { + return OpenTelemetry.noop(); + } + + protected abstract MeterProvider getMeterProvider(); + + protected abstract LoggerProvider getLoggerProvider(); + + @Test + void propagating() { + ContextPropagators contextPropagators = Mockito.mock(ContextPropagators.class); + OpenTelemetry openTelemetry = OpenTelemetry.propagating(contextPropagators); + + assertThat(openTelemetry.getTracerProvider()).isSameAs(getTracerProvider()); + assertThat(openTelemetry.getMeterProvider()).isSameAs(getMeterProvider()); + assertThat(openTelemetry.getLogsBridge()).isSameAs(getLoggerProvider()); + assertThat(openTelemetry.getPropagators()).isSameAs(contextPropagators); + } + + @Test + void testGlobalBeforeSet() { + assertThat(GlobalOpenTelemetry.getTracerProvider()).isSameAs(getTracerProvider()); + assertThat(GlobalOpenTelemetry.getTracerProvider()) + .isSameAs(GlobalOpenTelemetry.getTracerProvider()); + assertThat(GlobalOpenTelemetry.getPropagators()).isSameAs(GlobalOpenTelemetry.getPropagators()); + } + + @Test + void independentNonGlobalPropagators() { + ContextPropagators propagators1 = Mockito.mock(ContextPropagators.class); + OpenTelemetry otel1 = OpenTelemetry.propagating(propagators1); + ContextPropagators propagators2 = Mockito.mock(ContextPropagators.class); + OpenTelemetry otel2 = OpenTelemetry.propagating(propagators2); + + assertThat(otel1.getPropagators()).isSameAs(propagators1); + assertThat(otel2.getPropagators()).isSameAs(propagators2); + } + + @Test + void setThenSet() { + setOpenTelemetry(); + assertThatThrownBy(() -> GlobalOpenTelemetry.set(getOpenTelemetry())) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("GlobalOpenTelemetry.set has already been called") + .hasStackTraceContaining("setOpenTelemetry"); + } + + @Test + void getThenSet() { + assertThat(getGlobalOpenTelemetry().getClass().getName()) + .isEqualTo("io.opentelemetry.api.DefaultOpenTelemetry"); + assertThatThrownBy(() -> GlobalOpenTelemetry.set(getOpenTelemetry())) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("GlobalOpenTelemetry.set has already been called") + .hasStackTraceContaining("getGlobalOpenTelemetry"); + } + + @Test + void toString_noop_Valid() { + assertThat(getOpenTelemetry().toString()) + .isEqualTo( + "DefaultOpenTelemetry{" + + "propagators=DefaultContextPropagators{textMapPropagator=NoopTextMapPropagator}" + + "}"); + } +} diff --git a/bom/build.gradle.kts b/bom/build.gradle.kts index ae50cabc349..50d8dc950a1 100644 --- a/bom/build.gradle.kts +++ b/bom/build.gradle.kts @@ -7,10 +7,3 @@ group = "io.opentelemetry" base.archivesName.set("opentelemetry-bom") otelBom.projectFilter.set { 
!it.hasProperty("otel.release") } - -// Artifacts that were previously published and included in the BOM for backwards compatibility -otelBom.addFallback("opentelemetry-exporter-jaeger-proto", "1.17.0") -otelBom.addFallback("opentelemetry-extension-annotations", "1.18.0") -otelBom.addFallback("opentelemetry-sdk-extension-resources", "1.19.0") -otelBom.addFallback("opentelemetry-sdk-extension-aws", "1.19.0") -otelBom.addFallback("opentelemetry-extension-aws", "1.20.1") diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts index 799a324cbb8..8d50a09a5f4 100644 --- a/buildSrc/build.gradle.kts +++ b/buildSrc/build.gradle.kts @@ -2,18 +2,26 @@ plugins { `kotlin-dsl` // When updating, update below in dependencies too - id("com.diffplug.spotless") version "6.21.0" + id("com.diffplug.spotless") version "7.0.2" } -if (!JavaVersion.current().isCompatibleWith(JavaVersion.VERSION_17)) { +if (!hasLauncherForJavaVersion(17)) { throw GradleException( - "JDK 17 or higher is required to build. " + - "One option is to download it from https://adoptium.net/. If you believe you already " + - "have it, please check that the JAVA_HOME environment variable is pointing at the " + - "JDK 17 installation.", + "JDK 17 is required to build and gradle was unable to detect it on the system. " + + "Please install it and see https://docs.gradle.org/current/userguide/toolchains.html#sec:auto_detection " + + "for details on how gradle detects java toolchains." ) } +fun hasLauncherForJavaVersion(version: Int): Boolean { + return try { + javaToolchains.launcherFor { languageVersion = JavaLanguageVersion.of(version) }.get() + true + } catch (e: Exception) { + false + } +} + spotless { kotlinGradle { ktlint().editorConfigOverride(mapOf( @@ -42,25 +50,25 @@ repositories { } dependencies { - implementation(enforcedPlatform("com.squareup.wire:wire-bom:4.8.1")) - implementation("com.google.auto.value:auto-value-annotations:1.10.3") + implementation(enforcedPlatform("com.squareup.wire:wire-bom:5.3.1")) + implementation("com.google.auto.value:auto-value-annotations:1.11.0") // When updating, update above in plugins too - implementation("com.diffplug.spotless:spotless-plugin-gradle:6.21.0") + implementation("com.diffplug.spotless:spotless-plugin-gradle:7.0.2") // Needed for japicmp but not automatically brought in for some reason. 
- implementation("com.google.guava:guava:32.1.2-jre") + implementation("com.google.guava:guava:33.4.6-jre") + implementation("com.gradle.develocity:com.gradle.develocity.gradle.plugin:3.19.2") implementation("com.squareup:javapoet:1.13.0") implementation("com.squareup.wire:wire-compiler") implementation("com.squareup.wire:wire-gradle-plugin") implementation("gradle.plugin.com.google.protobuf:protobuf-gradle-plugin:0.8.18") - implementation("gradle.plugin.io.morethan.jmhreport:gradle-jmh-report:0.9.0") - implementation("me.champeau.gradle:japicmp-gradle-plugin:0.4.2") - implementation("me.champeau.jmh:jmh-gradle-plugin:0.7.1") - implementation("net.ltgt.gradle:gradle-errorprone-plugin:3.1.0") - implementation("net.ltgt.gradle:gradle-nullaway-plugin:1.6.0") - // at the moment 1.9.0 is the latest version supported by codeql - implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.9.10") - implementation("org.owasp:dependency-check-gradle:8.4.0") - implementation("ru.vyarus:gradle-animalsniffer-plugin:1.7.1") + implementation("gradle.plugin.io.morethan.jmhreport:gradle-jmh-report:0.9.6") + implementation("me.champeau.gradle:japicmp-gradle-plugin:0.4.6") + implementation("me.champeau.jmh:jmh-gradle-plugin:0.7.3") + implementation("net.ltgt.gradle:gradle-errorprone-plugin:4.1.0") + implementation("net.ltgt.gradle:gradle-nullaway-plugin:2.2.0") + implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:2.1.20") + implementation("org.owasp:dependency-check-gradle:12.1.0") + implementation("ru.vyarus:gradle-animalsniffer-plugin:2.0.0") } // We can't apply conventions to this build so include important ones such as the Java compilation diff --git a/buildSrc/src/main/kotlin/io/opentelemetry/gradle/OtelBomExtension.kt b/buildSrc/src/main/kotlin/io/opentelemetry/gradle/OtelBomExtension.kt index a3ba9e47f3a..5857b99808b 100644 --- a/buildSrc/src/main/kotlin/io/opentelemetry/gradle/OtelBomExtension.kt +++ b/buildSrc/src/main/kotlin/io/opentelemetry/gradle/OtelBomExtension.kt @@ -11,13 +11,4 @@ import java.util.function.Predicate abstract class OtelBomExtension { abstract val projectFilter: Property> - val additionalDependencies: MutableSet = hashSetOf() - - fun addFallback(artifactId: String, version: String) { - this.additionalDependencies.add("io.opentelemetry:" + artifactId + ":" + version) - } - - fun addExtra(groupId: String, artifactId: String, version: String) { - this.additionalDependencies.add(groupId + ":" + artifactId + ":" + version) - } } diff --git a/buildSrc/src/main/kotlin/io/opentelemetry/gradle/ProtoFieldsWireHandlerFactory.kt b/buildSrc/src/main/kotlin/io/opentelemetry/gradle/ProtoFieldsWireHandlerFactory.kt index 2aca450347f..cd99a69dc44 100644 --- a/buildSrc/src/main/kotlin/io/opentelemetry/gradle/ProtoFieldsWireHandlerFactory.kt +++ b/buildSrc/src/main/kotlin/io/opentelemetry/gradle/ProtoFieldsWireHandlerFactory.kt @@ -3,10 +3,6 @@ package io.opentelemetry.gradle import com.squareup.wire.schema.SchemaHandler class ProtoFieldsWireHandlerFactory : SchemaHandler.Factory{ - @Deprecated("deprecated in parent") - override fun create(): SchemaHandler { - return ProtoFieldsWireHandler() - } override fun create( includes: List, @@ -15,8 +11,7 @@ class ProtoFieldsWireHandlerFactory : SchemaHandler.Factory{ outDirectory: String, options: Map ): SchemaHandler { - @Suppress("DEPRECATION") - return create() + return ProtoFieldsWireHandler() } } diff --git a/buildSrc/src/main/kotlin/otel.animalsniffer-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.animalsniffer-conventions.gradle.kts 
index 9680996fcc8..73bbf0dd2d7 100644 --- a/buildSrc/src/main/kotlin/otel.animalsniffer-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/otel.animalsniffer-conventions.gradle.kts @@ -7,7 +7,7 @@ plugins { } dependencies { - add("signature", "com.toasttab.android:gummy-bears-api-21:0.3.0:coreLib@signature") + signature(project(path = ":animal-sniffer-signature", configuration = "generatedSignature")) } animalsniffer { diff --git a/buildSrc/src/main/kotlin/otel.bom-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.bom-conventions.gradle.kts index c9e7973b7f2..43674ccc6e5 100644 --- a/buildSrc/src/main/kotlin/otel.bom-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/otel.bom-conventions.gradle.kts @@ -61,11 +61,4 @@ afterEvaluate { } } } - otelBom.additionalDependencies.forEach { dependency -> - dependencies { - constraints { - api(dependency) - } - } - } } diff --git a/buildSrc/src/main/kotlin/otel.errorprone-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.errorprone-conventions.gradle.kts index e96a95f7992..3943bc21bbe 100644 --- a/buildSrc/src/main/kotlin/otel.errorprone-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/otel.errorprone-conventions.gradle.kts @@ -10,6 +10,7 @@ plugins { dependencies { errorprone("com.google.errorprone:error_prone_core") errorprone("com.uber.nullaway:nullaway") + errorprone(project(":custom-checks")) } val disableErrorProne = properties["disableErrorProne"]?.toString()?.toBoolean() ?: false @@ -86,9 +87,14 @@ tasks { // cognitive load is dubious. disable("YodaCondition") - if (name.contains("Jmh") || name.contains("Test")) { + // Text blocks are not supported in java 8 + disable("StringConcatToTextBlock") + + if ((name.contains("Jmh") || name.contains("Test") || project.name.contains("testing-internal")) && !project.name.equals("custom-checks")) { // Allow underscore in test-type method names disable("MemberName") + // Internal javadoc not needed for test or jmh classes + disable("OtelInternalJavadoc") } option("NullAway:CustomContractAnnotations", "io.opentelemetry.api.internal.Contract") diff --git a/buildSrc/src/main/kotlin/otel.jacoco-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.jacoco-conventions.gradle.kts index f672ef2dcad..07c2f5e8d62 100644 --- a/buildSrc/src/main/kotlin/otel.jacoco-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/otel.jacoco-conventions.gradle.kts @@ -5,7 +5,7 @@ plugins { } jacoco { - toolVersion = "0.8.10" + toolVersion = "0.8.13" } // https://docs.gradle.org/current/samples/sample_jvm_multi_project_with_code_coverage.html diff --git a/buildSrc/src/main/kotlin/otel.japicmp-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.japicmp-conventions.gradle.kts index c720b307486..46901734b23 100644 --- a/buildSrc/src/main/kotlin/otel.japicmp-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/otel.japicmp-conventions.gradle.kts @@ -1,16 +1,8 @@ import com.google.auto.value.AutoValue -import japicmp.model.JApiChangeStatus -import japicmp.model.JApiClass -import japicmp.model.JApiCompatibility -import japicmp.model.JApiCompatibilityChange -import japicmp.model.JApiMethod +import japicmp.model.* import me.champeau.gradle.japicmp.JapicmpTask import me.champeau.gradle.japicmp.report.Violation -import me.champeau.gradle.japicmp.report.stdrules.AbstractRecordingSeenMembers -import me.champeau.gradle.japicmp.report.stdrules.BinaryIncompatibleRule -import me.champeau.gradle.japicmp.report.stdrules.RecordSeenMembersSetup -import me.champeau.gradle.japicmp.report.stdrules.SourceCompatibleRule -import 
me.champeau.gradle.japicmp.report.stdrules.UnchangedMemberRule +import me.champeau.gradle.japicmp.report.stdrules.* plugins { @@ -35,26 +27,30 @@ val latestReleasedVersion: String by lazy { class AllowNewAbstractMethodOnAutovalueClasses : AbstractRecordingSeenMembers() { override fun maybeAddViolation(member: JApiCompatibility): Violation? { - val allowableAutovalueChanges = setOf(JApiCompatibilityChange.METHOD_ABSTRACT_ADDED_TO_CLASS, JApiCompatibilityChange.METHOD_ADDED_TO_PUBLIC_CLASS) - if (member.compatibilityChanges.filter { !allowableAutovalueChanges.contains(it) }.isEmpty() && - member is JApiMethod && - member.getjApiClass().newClass.get().getAnnotation(AutoValue::class.java) != null - ) { + val allowableAutovalueChanges = setOf(JApiCompatibilityChangeType.METHOD_ABSTRACT_ADDED_TO_CLASS, + JApiCompatibilityChangeType.METHOD_ADDED_TO_PUBLIC_CLASS, JApiCompatibilityChangeType.ANNOTATION_ADDED) + if (member.compatibilityChanges.filter { !allowableAutovalueChanges.contains(it.type) }.isEmpty() && + member is JApiMethod && isAutoValueClass(member.getjApiClass())) + { return Violation.accept(member, "Autovalue will automatically add implementation") } if (member.compatibilityChanges.isEmpty() && - member is JApiClass && - member.newClass.get().getAnnotation(AutoValue::class.java) != null) { + member is JApiClass && isAutoValueClass(member)) { return Violation.accept(member, "Autovalue class modification is allowed") } return null } + + fun isAutoValueClass(japiClass: JApiClass): Boolean { + return japiClass.newClass.get().getAnnotation(AutoValue::class.java) != null || + japiClass.newClass.get().getAnnotation(AutoValue.Builder::class.java) != null + } } class SourceIncompatibleRule : AbstractRecordingSeenMembers() { override fun maybeAddViolation(member: JApiCompatibility): Violation? { if (!member.isSourceCompatible()) { - return Violation.error(member, "Not source compatible") + return Violation.error(member, "Not source compatible: $member") } return null } @@ -115,6 +111,8 @@ if (!project.hasProperty("otel.release") && !project.name.startsWith("bom")) { // Reproduce defaults from https://github.com/melix/japicmp-gradle-plugin/blob/09f52739ef1fccda6b4310cf3f4b19dc97377024/src/main/java/me/champeau/gradle/japicmp/report/ViolationsGenerator.java#L130 // with some changes. 
val exclusions = mutableListOf() + // Generics are not detected correctly + exclusions.add("CLASS_GENERIC_TEMPLATE_CHANGED") // Allow new default methods on interfaces exclusions.add("METHOD_NEW_DEFAULT") // Allow adding default implementations for default methods @@ -137,7 +135,13 @@ if (!project.hasProperty("otel.release") && !project.name.startsWith("bom")) { // this is needed so that we only consider the current artifact, and not dependencies ignoreMissingClasses.set(true) - packageExcludes.addAll("*.internal", "*.internal.*", "io.opentelemetry.internal.shaded.jctools.*") + packageExcludes.addAll( + "*.internal", + "*.internal.*", + "io.opentelemetry.internal.shaded.jctools.*", + // Temporarily suppress warnings from public generated classes from :sdk-extensions:jaeger-remote-sampler + "io.opentelemetry.sdk.extension.trace.jaeger.proto.api_v2" + ) val baseVersionString = if (apiBaseVersion == null) "latest" else baselineVersion txtOutputFile.set( apiNewVersion?.let { file("$rootDir/docs/apidiffs/${apiNewVersion}_vs_$baselineVersion/${base.archivesName.get()}.txt") } diff --git a/buildSrc/src/main/kotlin/otel.java-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.java-conventions.gradle.kts index bf63d7db4c5..d1957a45ebd 100644 --- a/buildSrc/src/main/kotlin/otel.java-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/otel.java-conventions.gradle.kts @@ -24,6 +24,13 @@ base { } } +// normalize timestamps and file ordering in jars, making the outputs reproducible +// see open-telemetry/opentelemetry-java#4488 +tasks.withType().configureEach { + isPreserveFileTimestamps = false + isReproducibleFileOrder = true +} + java { toolchain { languageVersion.set(JavaLanguageVersion.of(17)) @@ -35,25 +42,31 @@ java { checkstyle { configDirectory.set(file("$rootDir/buildscripts/")) - toolVersion = "10.12.3" + toolVersion = "10.23.0" isIgnoreFailures = false configProperties["rootDir"] = rootDir } dependencyCheck { - skipConfigurations = listOf( + skipConfigurations = mutableListOf( "errorprone", "checkstyle", "annotationProcessor", + "java9AnnotationProcessor", + "moduleAnnotationProcessor", + "testAnnotationProcessor", + "testJpmsAnnotationProcessor", "animalsniffer", - "spotless865457264", // spotless865457264 is a weird configuration that's only added in jaeger-proto, jaeger-remote-sampler + "spotless996155815", // spotless996155815 is a weird configuration that's only added in jaeger-proto, jaeger-remote-sampler "js2p", "jmhAnnotationProcessor", + "jmhBasedTestAnnotationProcessor", "jmhCompileClasspath", "jmhRuntimeClasspath", "jmhRuntimeOnly") failBuildOnCVSS = 7.0f // fail on high or critical CVE analyzers.assemblyEnabled = false // not sure why its trying to analyze .NET assemblies + nvd.apiKey = System.getenv("NVD_API_KEY") } val testJavaVersion = gradle.startParameter.projectProperties.get("testJavaVersion")?.let(JavaVersion::toVersion) @@ -75,7 +88,6 @@ tasks { "-Xlint:-processing", // We suppress the "options" warning because it prevents compilation on modern JDKs "-Xlint:-options", - // Fail build on any warning "-Werror", ), @@ -102,6 +114,14 @@ tasks { ) } + val defaultMaxRetries = if (System.getenv().containsKey("CI")) 2 else 0 + val maxTestRetries = gradle.startParameter.projectProperties["maxTestRetries"]?.toInt() ?: defaultMaxRetries + + develocity.testRetry { + // You can see tests that were retried by this mechanism in the collected test reports and build scans. 
+ maxRetries.set(maxTestRetries); + } + testLogging { exceptionFormat = TestExceptionFormat.FULL showExceptions = true @@ -123,12 +143,6 @@ tasks { breakIterator(true) addBooleanOption("html5", true) - - // TODO (trask) revisit to see if url is fixed - // currently broken because https://docs.oracle.com/javase/8/docs/api/element-list is missing - // and redirects - // links("https://docs.oracle.com/javase/8/docs/api/") - addBooleanOption("Xdoclint:all,-missing", true) } } diff --git a/buildSrc/src/main/kotlin/otel.jmh-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.jmh-conventions.gradle.kts index 5434e9f9181..3e4ad43195c 100644 --- a/buildSrc/src/main/kotlin/otel.jmh-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/otel.jmh-conventions.gradle.kts @@ -7,6 +7,11 @@ dependencies { jmh(platform(project(":dependencyManagement"))) jmh("org.openjdk.jmh:jmh-core") jmh("org.openjdk.jmh:jmh-generator-bytecode") + + // This enables running JMH benchmark classes within IntelliJ using + // JMH plugins + jmh("org.openjdk.jmh:jmh-generator-annprocess") + jmhAnnotationProcessor("org.openjdk.jmh:jmh-generator-annprocess") } // invoke jmh on a single benchmark class like so: diff --git a/buildSrc/src/main/kotlin/otel.spotless-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.spotless-conventions.gradle.kts index 5aec55a0407..b1c39dcd0db 100644 --- a/buildSrc/src/main/kotlin/otel.spotless-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/otel.spotless-conventions.gradle.kts @@ -70,7 +70,7 @@ spotless { "*.sh", "src/**/*.properties", ) - indentWithSpaces() + leadingTabsToSpaces() trimTrailingWhitespace() endWithNewline() } diff --git a/buildscripts/checkstyle-suppressions.xml b/buildscripts/checkstyle-suppressions.xml index bee50be5ec9..f5501405d3a 100644 --- a/buildscripts/checkstyle-suppressions.xml +++ b/buildscripts/checkstyle-suppressions.xml @@ -6,7 +6,6 @@ - diff --git a/buildscripts/semantic-convention/.gitignore b/buildscripts/semantic-convention/.gitignore deleted file mode 100644 index a93b221beb5..00000000000 --- a/buildscripts/semantic-convention/.gitignore +++ /dev/null @@ -1 +0,0 @@ -opentelemetry-specification/ diff --git a/buildscripts/semantic-convention/generate.sh b/buildscripts/semantic-convention/generate.sh deleted file mode 100755 index 9e8e51ec36b..00000000000 --- a/buildscripts/semantic-convention/generate.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/bash - -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -ROOT_DIR="${SCRIPT_DIR}/../../" - -# freeze the spec & generator tools versions to make SemanticAttributes generation reproducible -SEMCONV_VERSION=1.20.0 -SPEC_VERSION=v$SEMCONV_VERSION -SCHEMA_URL=https://opentelemetry.io/schemas/$SEMCONV_VERSION -GENERATOR_VERSION=0.18.0 - -cd ${SCRIPT_DIR} - -rm -rf opentelemetry-specification || true -mkdir opentelemetry-specification -cd opentelemetry-specification - -git init -git remote add origin https://github.com/open-telemetry/opentelemetry-specification.git -git fetch origin "$SPEC_VERSION" -git reset --hard FETCH_HEAD -cd ${SCRIPT_DIR} - -docker run --rm \ - -v ${SCRIPT_DIR}/opentelemetry-specification/semantic_conventions:/source \ - -v ${SCRIPT_DIR}/templates:/templates \ - -v ${ROOT_DIR}/semconv/src/main/java/io/opentelemetry/semconv/trace/attributes/:/output \ - otel/semconvgen:$GENERATOR_VERSION \ - --only span,event,attribute_group,scope \ - -f /source code \ - --template /templates/SemanticAttributes.java.j2 \ - --output /output/SemanticAttributes.java \ - -Dsemconv=trace \ - 
-Dclass=SemanticAttributes \ - -DschemaUrl=$SCHEMA_URL \ - -Dpkg=io.opentelemetry.semconv.trace.attributes - -docker run --rm \ - -v ${SCRIPT_DIR}/opentelemetry-specification/semantic_conventions:/source \ - -v ${SCRIPT_DIR}/templates:/templates \ - -v ${ROOT_DIR}/semconv/src/main/java/io/opentelemetry/semconv/resource/attributes/:/output \ - otel/semconvgen:$GENERATOR_VERSION \ - --only resource \ - -f /source code \ - --template /templates/SemanticAttributes.java.j2 \ - --output /output/ResourceAttributes.java \ - -Dclass=ResourceAttributes \ - -DschemaUrl=$SCHEMA_URL \ - -Dpkg=io.opentelemetry.semconv.resource.attributes - -cd "$ROOT_DIR" -./gradlew spotlessApply diff --git a/buildscripts/semantic-convention/templates/SemanticAttributes.java.j2 b/buildscripts/semantic-convention/templates/SemanticAttributes.java.j2 deleted file mode 100644 index 8c224df92f9..00000000000 --- a/buildscripts/semantic-convention/templates/SemanticAttributes.java.j2 +++ /dev/null @@ -1,486 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - - -{%- macro to_java_return_type(type) -%} - {%- if type == "string" -%} - String - {%- elif type == "string[]" -%} - List - {%- elif type == "boolean" -%} - boolean - {%- elif type == "int" -%} - long - {%- elif type == "double" -%} - double - {%- else -%} - {{type}} - {%- endif -%} -{%- endmacro %} -{%- macro to_java_key_type(type) -%} - {%- if type == "string" -%} - stringKey - {%- elif type == "string[]" -%} - stringArrayKey - {%- elif type == "boolean" -%} - booleanKey - {%- elif type == "int" -%} - longKey - {%- elif type == "double" -%} - doubleKey - {%- else -%} - {{lowerFirst(type)}}Key - {%- endif -%} -{%- endmacro %} -{%- macro print_value(type, value) -%} - {{ "\"" if type == "String"}}{{value}}{{ "\"" if type == "String"}} -{%- endmacro %} -{%- macro upFirst(text) -%} - {{ text[0]|upper}}{{text[1:] }} -{%- endmacro %} -{%- macro lowerFirst(text) -%} - {{ text[0]|lower}}{{text[1:] }} -{%- endmacro %} - -package {{pkg | trim}}; - -import static io.opentelemetry.api.common.AttributeKey.booleanKey; -import static io.opentelemetry.api.common.AttributeKey.doubleKey; -import static io.opentelemetry.api.common.AttributeKey.longKey; -import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.api.common.AttributeKey.stringArrayKey; - -import io.opentelemetry.api.common.AttributeKey; -import java.util.List; - -// DO NOT EDIT, this is an Auto-generated file from buildscripts/semantic-convention{{template}} -@SuppressWarnings("unused") -public final class {{class}} { - /** - * The URL of the OpenTelemetry schema for these keys and values. - */ - public static final String SCHEMA_URL = "{{schemaUrl}}"; - {%- for attribute in attributes if attribute.is_local and not attribute.ref %} - - /** - * {{attribute.brief | render_markdown(code="{{@code {0}}}", paragraph="{0}")}} - {%- if attribute.note %} - * - *

<p>Notes: -
    <ul> {{attribute.note | render_markdown(code="{{@code {0}}}", paragraph="<li>{0}</li>", list="{0}")}} </ul>
- {%- endif %} - {%- if (attribute.stability | string()) == "StabilityLevel.DEPRECATED" %} - * - * @deprecated {{attribute.brief | to_doc_brief}}. - {%- endif %} - */ - {%- if (attribute.stability | string()) == "StabilityLevel.DEPRECATED" %} - @Deprecated - {%- endif %} - public static final AttributeKey<{{upFirst(to_java_return_type(attribute.attr_type | string))}}> {{attribute.fqn | to_const_name}} = {{to_java_key_type(attribute.attr_type | string)}}("{{attribute.fqn}}"); - {%- endfor %} - - // Enum definitions - {%- for attribute in attributes if attribute.is_local and not attribute.ref %} - {%- if attribute.is_enum %} - {%- set class_name = attribute.fqn | to_camelcase(True) ~ "Values" %} - {%- set type = to_java_return_type(attribute.attr_type.enum_type) %} - public static final class {{class_name}} { - {%- for member in attribute.attr_type.members %} - /** {% filter escape %}{{member.brief | to_doc_brief}}.{% endfilter %} */ - public static final {{ type }} {{ member.member_id | to_const_name }} = {{ print_value(type, member.value) }}; - - {%- endfor %} - - {%- if class_name == "NetTransportValues" %} - /** @deprecated This item has been removed as of 1.13.0 of the semantic conventions. */ - @Deprecated - public static final String IP = "ip"; - - /** @deprecated This item has been removed as of 1.13.0 of the semantic conventions. */ - @Deprecated - public static final String UNIX = "unix"; - {%- endif %} - - private {{ class_name }}() {} - } - - {% endif %} - {%- endfor %} - - {%- if class == "SemanticAttributes" %} - // Manually defined and not YET in the YAML - /** - * The name of an event describing an exception. - * - *

Typically an event with that name should not be manually created. Instead {@link - * io.opentelemetry.api.trace.Span#recordException(Throwable)} should be used. - */ - public static final String EXCEPTION_EVENT_NAME = "exception"; - - /** - * The name of the keyspace being accessed. - * - * @deprecated this item has been removed as of 1.8.0 of the semantic conventions. Please use {@link SemanticAttributes#DB_NAME} instead. - */ - @Deprecated - public static final AttributeKey DB_CASSANDRA_KEYSPACE = - stringKey("db.cassandra.keyspace"); - - /** - * The HBase namespace being accessed. - * - * @deprecated this item has been removed as of 1.8.0 of the semantic conventions. Please use {@link SemanticAttributes#DB_NAME} instead. - */ - @Deprecated - public static final AttributeKey DB_HBASE_NAMESPACE = stringKey("db.hbase.namespace"); - - /** - * The size of the uncompressed request payload body after transport decoding. Not set if - * transport encoding not used. - * - * @deprecated this item has been removed as of 1.13.0 of the semantic conventions. Please use {@link SemanticAttributes#HTTP_REQUEST_CONTENT_LENGTH} instead. - */ - @Deprecated - public static final AttributeKey HTTP_REQUEST_CONTENT_LENGTH_UNCOMPRESSED = - longKey("http.request_content_length_uncompressed"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use {@link SemanticAttributes#HTTP_RESPONSE_CONTENT_LENGTH} instead. - */ - @Deprecated - public static final AttributeKey HTTP_RESPONSE_CONTENT_LENGTH_UNCOMPRESSED = - longKey("http.response_content_length_uncompressed"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use - * {@link SemanticAttributes#NET_HOST_NAME} instead. - */ - @Deprecated - public static final AttributeKey HTTP_SERVER_NAME = stringKey("http.server_name"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use - * {@link SemanticAttributes#NET_HOST_NAME} instead. - */ - @Deprecated - public static final AttributeKey HTTP_HOST = stringKey("http.host"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use {@link SemanticAttributes#NET_SOCK_PEER_ADDR} instead. - */ - @Deprecated - public static final AttributeKey NET_PEER_IP = stringKey("net.peer.ip"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use {@link SemanticAttributes#NET_SOCK_HOST_ADDR} instead. - */ - @Deprecated - public static final AttributeKey NET_HOST_IP = stringKey("net.host.ip"); - - /** - * The ordinal number of request re-sending attempt. - * @deprecated This item has been removed as of 1.15.0 of the semantic conventions. Use {@link SemanticAttributes#HTTP_RESEND_COUNT} instead. - */ - @Deprecated - public static final AttributeKey HTTP_RETRY_COUNT = longKey("http.retry_count"); - - - /** - * A string identifying the messaging system. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#MESSAGING_DESTINATION_NAME} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_DESTINATION = - stringKey("messaging.destination"); - - /** - * A boolean that is true if the message destination is temporary. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#MESSAGING_DESTINATION_TEMPORARY} instead. 
- */ - @Deprecated - public static final AttributeKey MESSAGING_TEMP_DESTINATION = - booleanKey("messaging.temp_destination"); - - /** - * The name of the transport protocol. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#NET_PROTOCOL_NAME} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_PROTOCOL = stringKey("messaging.protocol"); - - /** - * The version of the transport protocol. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#NET_PROTOCOL_VERSION} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_PROTOCOL_VERSION = - stringKey("messaging.protocol_version"); - - /** - * Connection string. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. There is no replacement. - */ - @Deprecated - public static final AttributeKey MESSAGING_URL = stringKey("messaging.url"); - - /** - * The conversation ID identifying the conversation to which the - * message belongs, represented as a string. Sometimes called "Correlation ID". - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#MESSAGING_MESSAGE_CONVERSATION_ID} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_CONVERSATION_ID = - stringKey("messaging.conversation_id"); - - /** - * RabbitMQ message routing key. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_RABBITMQ_ROUTING_KEY = - stringKey("messaging.rabbitmq.routing_key"); - - /** - * Partition the message is received from. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#MESSAGING_KAFKA_SOURCE_PARTITION} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_KAFKA_PARTITION = - longKey("messaging.kafka.partition"); - - /** - * A boolean that is true if the message is a tombstone. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#MESSAGING_KAFKA_MESSAGE_TOMBSTONE} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_KAFKA_TOMBSTONE = - booleanKey("messaging.kafka.tombstone"); - - /** - * The timestamp in milliseconds that the delay message is expected to be delivered to consumer. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#MESSAGING_ROCKETMQ_MESSAGE_DELIVERY_TIMESTAMP} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_ROCKETMQ_DELIVERY_TIMESTAMP = - longKey("messaging.rocketmq.delivery_timestamp"); - - - /** - * The delay time level for delay message, which determines the message delay time. - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link SemanticAttributes#MESSAGING_ROCKETMQ_MESSAGE_DELAY_TIME_LEVEL} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_ROCKETMQ_DELAY_TIME_LEVEL = - longKey("messaging.rocketmq.delay_time_level"); - - /** - * The name of the instrumentation scope - ({@code InstrumentationScope.Name} in OTLP). - * @deprecated This item has been moved, use {@link io.opentelemetry.semconv.resource.attributes.ResourceAttributes#OTEL_SCOPE_NAME} instead. 
- */ - @Deprecated - public static final AttributeKey OTEL_SCOPE_NAME = stringKey("otel.scope.name"); - - /** - * The version of the instrumentation scope - ({@code InstrumentationScope.Version} in OTLP). - * @deprecated This item has been moved, use {@link io.opentelemetry.semconv.resource.attributes.ResourceAttributes#OTEL_SCOPE_VERSION} instead. - */ - @Deprecated - public static final AttributeKey OTEL_SCOPE_VERSION = stringKey("otel.scope.version"); - - /** - * The execution ID of the current function execution. - * @deprecated This item has been renamed in 1.19.0 version of the semantic conventions. - * Use {@link SemanticAttributes#FAAS_INVOCATION_ID} instead. - */ - @Deprecated - public static final AttributeKey FAAS_EXECUTION = stringKey("faas.execution"); - - /** - * Value of the HTTP - * User-Agent header sent by the client. - * @deprecated This item has been renamed in 1.19.0 version of the semantic conventions. - * Use {@link SemanticAttributes#USER_AGENT_ORIGINAL} instead. - */ - @Deprecated - public static final AttributeKey HTTP_USER_AGENT = stringKey("http.user_agent"); - - /** - * Deprecated. - * - * @deprecated Deprecated, use the {@link io.opentelemetry.semconv.resource.attributes.ResourceAttributes#OTEL_SCOPE_NAME} attribute. - */ - @Deprecated - public static final AttributeKey OTEL_LIBRARY_NAME = stringKey("otel.library.name"); - - /** - * Deprecated. - * - * @deprecated Deprecated, use the {@link io.opentelemetry.semconv.resource.attributes.ResourceAttributes#OTEL_SCOPE_VERSION} attribute. - */ - @Deprecated - public static final AttributeKey OTEL_LIBRARY_VERSION = stringKey("otel.library.version"); - - /** - * Kind of HTTP protocol used. - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final AttributeKey HTTP_FLAVOR = stringKey("http.flavor"); - - /** - * Enum definitions for {@link #HTTP_FLAVOR}. - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final class HttpFlavorValues { - /** HTTP/1.0. */ - public static final String HTTP_1_0 = "1.0"; - - /** HTTP/1.1. */ - public static final String HTTP_1_1 = "1.1"; - - /** HTTP/2. */ - public static final String HTTP_2_0 = "2.0"; - - /** HTTP/3. */ - public static final String HTTP_3_0 = "3.0"; - - /** SPDY protocol. */ - public static final String SPDY = "SPDY"; - - /** QUIC protocol. */ - public static final String QUIC = "QUIC"; - - private HttpFlavorValues() {} - } - - /** - * Application layer protocol used. The value SHOULD be normalized to lowercase. - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. Use {@link SemanticAttributes#NET_PROTOCOL_NAME} instead. - */ - @Deprecated - public static final AttributeKey NET_APP_PROTOCOL_NAME = stringKey("net.app.protocol.name"); - - /** - * Version of the application layer protocol used. See note below. - * - *

<p>Notes: - * - *
<ul> - *
  <li>{@code net.app.protocol.version} refers to the version of the protocol used and might be - * different from the protocol client's version. If the HTTP client used has a version of - * {@code 0.27.2}, but sends HTTP version {@code 1.1}, this attribute should be set to - * {@code 1.1}. - * </ul>
- * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. Use {@link SemanticAttributes#NET_PROTOCOL_VERSION} instead. - */ - @Deprecated - public static final AttributeKey NET_APP_PROTOCOL_VERSION = stringKey("net.app.protocol.version"); - - /** - * The kind of message destination. - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final AttributeKey MESSAGING_DESTINATION_KIND = stringKey("messaging.destination.kind"); - - /** - * Enum values for {@link #MESSAGING_DESTINATION_KIND}. - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final class MessagingDestinationKindValues { - /** A message sent to a queue. */ - public static final String QUEUE = "queue"; - - /** A message sent to a topic. */ - public static final String TOPIC = "topic"; - - private MessagingDestinationKindValues() {} - } - - /** - * The kind of message source. - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final AttributeKey MESSAGING_SOURCE_KIND = stringKey("messaging.source.kind"); - - /** - * Enum values for {@link #MESSAGING_SOURCE_KIND}. - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final class MessagingSourceKindValues { - /** A message received from a queue. */ - public static final String QUEUE = "queue"; - - /** A message received from a topic. */ - public static final String TOPIC = "topic"; - - private MessagingSourceKindValues() {} - } - - {% endif %} - - {%- if class == "ResourceAttributes" %} - - /** - * Red Hat OpenShift on Google Cloud. - * @deprecated This item has been removed as of 1.18.0 of the semantic conventions. Use {@link ResourceAttributes#GCP_OPENSHIFT} instead. - */ - @Deprecated - public static final String GCP_OPENSHIFT = "gcp_openshift"; - - /** - * Full user-agent string provided by the browser - * - *

<p>Notes: - * - *
<ul> - *
  <li>The user-agent value SHOULD be provided only from browsers that do not have a mechanism - * to retrieve brands and platform individually from the User-Agent Client Hints API. To - * retrieve the value, the legacy {@code navigator.userAgent} API can be used. - * </ul>
- * @deprecated This item has been renamed in 1.19.0 version of the semantic conventions. Use {@link io.opentelemetry.semconv.trace.attributes.SemanticAttributes#USER_AGENT_ORIGINAL} instead. - */ - @Deprecated - public static final AttributeKey BROWSER_USER_AGENT = stringKey("browser.user_agent"); - - /** - * The unique ID of the single function that this runtime instance executes. - * - *

<p>Notes: - * - *
<ul> - *
  <li>On some cloud providers, it may not be possible to determine the full ID at startup, so - * consider setting {@code faas.id} as a span attribute instead. - *
  <li>The exact value to use for {@code faas.id} depends on the cloud provider: - *
  <li>AWS Lambda: The function ARN. - * Take care not to use the "invoked ARN" directly but replace any alias - * suffix with the resolved function version, as the same runtime instance may be - * invokable with multiple different aliases. - *
  <li>GCP: The URI of the resource - *
  <li>Azure: The Fully - * Qualified Resource ID of the invoked function, not the function app, having - * the form {@code - * /subscriptions//resourceGroups//providers/Microsoft.Web/sites//functions/}. - * This means that a span attribute MUST be used, as an Azure function app can host multiple - * functions that would usually share a TracerProvider. - * </ul>
- * @deprecated This item has been removed in 1.19.0 version of the semantic conventions. Use {@link ResourceAttributes#CLOUD_RESOURCE_ID} instead. - */ - @Deprecated - public static final AttributeKey FAAS_ID = stringKey("faas.id"); - - {% endif %} - - private {{class}}() {} -} diff --git a/context/build.gradle.kts b/context/build.gradle.kts index a0e17cb0145..6c8fa63837c 100644 --- a/context/build.gradle.kts +++ b/context/build.gradle.kts @@ -16,6 +16,16 @@ dependencies { testImplementation("com.google.guava:guava") } +dependencyCheck { + skipConfigurations.add("braveInOtelTestAnnotationProcessor") + skipConfigurations.add("grpcInOtelTestAnnotationProcessor") + skipConfigurations.add("otelAsBraveTestAnnotationProcessor") + skipConfigurations.add("otelInBraveTestAnnotationProcessor") + skipConfigurations.add("otelInGrpcTestAnnotationProcessor") + skipConfigurations.add("storageWrappersTestAnnotationProcessor") + skipConfigurations.add("strictContextEnabledTestAnnotationProcessor") +} + testing { suites { register("grpcInOtelTest") { diff --git a/context/src/braveInOtelTest/java/io/opentelemetry/context/BraveInOtelTest.java b/context/src/braveInOtelTest/java/io/opentelemetry/context/BraveInOtelTest.java index efaf20562d0..13298a341e2 100644 --- a/context/src/braveInOtelTest/java/io/opentelemetry/context/BraveInOtelTest.java +++ b/context/src/braveInOtelTest/java/io/opentelemetry/context/BraveInOtelTest.java @@ -27,6 +27,7 @@ class BraveInOtelTest { private static final TraceContext TRACE_CONTEXT = TraceContext.newBuilder().traceId(1).spanId(1).addExtra("japan").build(); + @SuppressWarnings("NonFinalStaticField") private static ExecutorService otherThread; @BeforeAll diff --git a/context/src/grpcInOtelTest/java/io/opentelemetry/context/GrpcInOtelTest.java b/context/src/grpcInOtelTest/java/io/opentelemetry/context/GrpcInOtelTest.java index 265c623ed4f..4d22ed765ee 100644 --- a/context/src/grpcInOtelTest/java/io/opentelemetry/context/GrpcInOtelTest.java +++ b/context/src/grpcInOtelTest/java/io/opentelemetry/context/GrpcInOtelTest.java @@ -21,6 +21,7 @@ class GrpcInOtelTest { private static final io.grpc.Context.Key FOOD = io.grpc.Context.key("food"); private static final io.grpc.Context.Key COUNTRY = io.grpc.Context.key("country"); + @SuppressWarnings("NonFinalStaticField") private static ExecutorService otherThread; @BeforeAll diff --git a/context/src/main/java/io/opentelemetry/context/Context.java b/context/src/main/java/io/opentelemetry/context/Context.java index d9c9f50c889..bb9a48d6039 100644 --- a/context/src/main/java/io/opentelemetry/context/Context.java +++ b/context/src/main/java/io/opentelemetry/context/Context.java @@ -27,6 +27,7 @@ import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Consumer; @@ -135,9 +136,37 @@ static Executor taskWrapping(Executor executor) { * @since 1.1.0 */ static ExecutorService taskWrapping(ExecutorService executorService) { + if (executorService instanceof CurrentContextExecutorService) { + return executorService; + } return new CurrentContextExecutorService(executorService); } + /** + * Returns an {@link ScheduledExecutorService} which delegates to the provided {@code + * executorService}, wrapping all invocations of {@link ExecutorService} methods such as {@link + * ExecutorService#execute(Runnable)} or {@link 
ExecutorService#submit(Runnable)} with the + * {@linkplain Context#current() current context} at the time of invocation. + * + *

<p>This is generally used to create a {@link ScheduledExecutorService} which will forward the + * {@link Context} during an invocation to another thread. For example, you may use something like + * {@code ScheduledExecutorService dbExecutor = Context.taskWrapping(threadPool)} to ensure calls + * like {@code dbExecutor.execute(() -> database.query())} have {@link Context} available on the + * thread executing database queries. + * + *
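For illustration, a minimal sketch of how the new wrapping overload might be used; the context key, printed output, and class name below are assumptions for the example, not taken from this change:

import io.opentelemetry.context.Context;
import io.opentelemetry.context.ContextKey;
import io.opentelemetry.context.Scope;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

class ScheduledWrappingExample {
  // assumed application-defined key, for illustration only
  private static final ContextKey<String> KEY = ContextKey.named("example-key");

  public static void main(String[] args) throws InterruptedException {
    ScheduledExecutorService wrapped =
        Context.taskWrapping(Executors.newSingleThreadScheduledExecutor());
    try (Scope ignored = Context.current().with(KEY, "request-1").makeCurrent()) {
      // schedule() runs the task with the context captured at submission time;
      // scheduleAtFixedRate()/scheduleWithFixedDelay() do not propagate it (see the note below)
      wrapped.schedule(
          () -> System.out.println(Context.current().get(KEY)), 100, TimeUnit.MILLISECONDS);
    }
    Thread.sleep(200);
    wrapped.shutdown();
  }
}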

Note: The context will not be propagated for {@link + * ScheduledExecutorService#scheduleAtFixedRate(Runnable, long, long, TimeUnit)} and {@link + * ScheduledExecutorService#scheduleWithFixedDelay(Runnable, long, long, TimeUnit)} calls. + * + * @since 1.43.0 + */ + static ScheduledExecutorService taskWrapping(ScheduledExecutorService executorService) { + if (executorService instanceof CurrentContextScheduledExecutorService) { + return executorService; + } + return new CurrentContextScheduledExecutorService(executorService); + } + /** * Returns the value stored in this {@link Context} for the given {@link ContextKey}, or {@code * null} if there is no value for the key in this context. diff --git a/context/src/main/java/io/opentelemetry/context/ContextStorageWrappers.java b/context/src/main/java/io/opentelemetry/context/ContextStorageWrappers.java index 9f8eb3276a1..faa05a66c0b 100644 --- a/context/src/main/java/io/opentelemetry/context/ContextStorageWrappers.java +++ b/context/src/main/java/io/opentelemetry/context/ContextStorageWrappers.java @@ -19,6 +19,7 @@ final class ContextStorageWrappers { private static final Logger log = Logger.getLogger(ContextStorageWrappers.class.getName()); + @SuppressWarnings("NonFinalStaticField") private static boolean storageInitialized; private static final List> wrappers = diff --git a/context/src/main/java/io/opentelemetry/context/CurrentContextExecutorService.java b/context/src/main/java/io/opentelemetry/context/CurrentContextExecutorService.java index 346b8498781..f3e5f3aa81b 100644 --- a/context/src/main/java/io/opentelemetry/context/CurrentContextExecutorService.java +++ b/context/src/main/java/io/opentelemetry/context/CurrentContextExecutorService.java @@ -14,7 +14,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -final class CurrentContextExecutorService extends ForwardingExecutorService { +class CurrentContextExecutorService extends ForwardingExecutorService { CurrentContextExecutorService(ExecutorService delegate) { super(delegate); diff --git a/context/src/main/java/io/opentelemetry/context/CurrentContextScheduledExecutorService.java b/context/src/main/java/io/opentelemetry/context/CurrentContextScheduledExecutorService.java new file mode 100644 index 00000000000..f32b1faf2a8 --- /dev/null +++ b/context/src/main/java/io/opentelemetry/context/CurrentContextScheduledExecutorService.java @@ -0,0 +1,44 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.context; + +import java.util.concurrent.Callable; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; + +final class CurrentContextScheduledExecutorService extends CurrentContextExecutorService + implements ScheduledExecutorService { + + private final ScheduledExecutorService delegate; + + CurrentContextScheduledExecutorService(ScheduledExecutorService delegate) { + super(delegate); + this.delegate = delegate; + } + + @Override + public ScheduledFuture schedule(Runnable command, long delay, TimeUnit unit) { + return delegate.schedule(Context.current().wrap(command), delay, unit); + } + + @Override + public ScheduledFuture schedule(Callable callable, long delay, TimeUnit unit) { + return delegate.schedule(Context.current().wrap(callable), delay, unit); + } + + @Override + public ScheduledFuture scheduleAtFixedRate( + Runnable command, long initialDelay, long period, TimeUnit unit) { + return 
delegate.scheduleAtFixedRate(command, initialDelay, period, unit); + } + + @Override + public ScheduledFuture scheduleWithFixedDelay( + Runnable command, long initialDelay, long delay, TimeUnit unit) { + return delegate.scheduleWithFixedDelay(command, initialDelay, delay, unit); + } +} diff --git a/context/src/main/java/io/opentelemetry/context/StrictContextStorage.java b/context/src/main/java/io/opentelemetry/context/StrictContextStorage.java index b02d33b4aea..aeed325c364 100644 --- a/context/src/main/java/io/opentelemetry/context/StrictContextStorage.java +++ b/context/src/main/java/io/opentelemetry/context/StrictContextStorage.java @@ -23,6 +23,7 @@ import static java.lang.Thread.currentThread; import io.opentelemetry.context.internal.shaded.WeakConcurrentMap; +import java.lang.ref.Reference; import java.util.Arrays; import java.util.List; import java.util.concurrent.ConcurrentHashMap; @@ -266,7 +267,9 @@ List drainPendingCallers() { public void run() { try { while (!Thread.interrupted()) { - CallerStackTrace caller = map.remove(remove()); + Reference reference = remove(); + // on openj9 ReferenceQueue.remove can return null + CallerStackTrace caller = reference != null ? map.remove(reference) : null; if (caller != null && !caller.closed) { logger.log( Level.SEVERE, "Scope garbage collected before being closed.", callerError(caller)); diff --git a/context/src/main/java/io/opentelemetry/context/propagation/internal/ExtendedTextMapGetter.java b/context/src/main/java/io/opentelemetry/context/propagation/internal/ExtendedTextMapGetter.java new file mode 100644 index 00000000000..d64604757db --- /dev/null +++ b/context/src/main/java/io/opentelemetry/context/propagation/internal/ExtendedTextMapGetter.java @@ -0,0 +1,44 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.context.propagation.internal; + +import io.opentelemetry.context.propagation.TextMapGetter; +import java.util.Collections; +import java.util.Iterator; +import javax.annotation.Nullable; + +/** + * Extends {@link TextMapGetter} to return possibly multiple values for a given key. + * + *

<p>This class is internal and experimental. Its APIs are unstable and can change at any time. Its + * APIs (or a version of them) may be promoted to the public stable API in the future, but no + * guarantees are made. + * + * @param <C> carrier of propagation fields, such as an http request. + */ +public interface ExtendedTextMapGetter<C> extends TextMapGetter<C> { + /** + * If implemented, returns all values for a given {@code key} in order, or returns an empty list. + * + *

<p>The default method returns the first value of the given propagation {@code key} as a + * singleton list, or returns an empty list. + * + *
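A rough sketch of an implementer's view, assuming a hypothetical carrier that stores repeated header values (the class and carrier types are illustrative, not part of this change):

import io.opentelemetry.context.propagation.internal.ExtendedTextMapGetter;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;

// Hypothetical carrier: a header map that can hold repeated keys, e.g. HTTP headers.
class MultiValueMapGetter implements ExtendedTextMapGetter<Map<String, List<String>>> {
  @Override
  public Iterable<String> keys(Map<String, List<String>> carrier) {
    return carrier.keySet();
  }

  @Override
  @Nullable
  public String get(@Nullable Map<String, List<String>> carrier, String key) {
    if (carrier == null || !carrier.containsKey(key) || carrier.get(key).isEmpty()) {
      return null;
    }
    // single-value view: return the first value, matching TextMapGetter semantics
    return carrier.get(key).get(0);
  }

  @Override
  public Iterator<String> getAll(@Nullable Map<String, List<String>> carrier, String key) {
    if (carrier == null || !carrier.containsKey(key)) {
      return Collections.emptyIterator();
    }
    // multi-value view: return every value recorded for the key, in order
    return carrier.get(key).iterator();
  }
}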

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + * @param carrier carrier of propagation fields, such as an http request. + * @param key the key of the field. + * @return all values for a given {@code key} in order, or returns an empty list. Default method + * wraps {@code get()} as an {@link Iterator}. + */ + default Iterator getAll(@Nullable C carrier, String key) { + String first = get(carrier, key); + if (first == null) { + return Collections.emptyIterator(); + } + return Collections.singleton(first).iterator(); + } +} diff --git a/context/src/otelAsBraveTest/java/io/opentelemetry/context/OtelAsBraveTest.java b/context/src/otelAsBraveTest/java/io/opentelemetry/context/OtelAsBraveTest.java index 6acad3d0e7b..f3498727995 100644 --- a/context/src/otelAsBraveTest/java/io/opentelemetry/context/OtelAsBraveTest.java +++ b/context/src/otelAsBraveTest/java/io/opentelemetry/context/OtelAsBraveTest.java @@ -27,6 +27,7 @@ class OtelAsBraveTest { private static final TraceContext TRACE_CONTEXT = TraceContext.newBuilder().traceId(1).spanId(1).addExtra("japan").build(); + @SuppressWarnings("NonFinalStaticField") private static ExecutorService otherThread; @BeforeAll diff --git a/context/src/otelInBraveTest/java/io/opentelemetry/context/OtelInBraveTest.java b/context/src/otelInBraveTest/java/io/opentelemetry/context/OtelInBraveTest.java index 72a8b402acf..232cb0c4755 100644 --- a/context/src/otelInBraveTest/java/io/opentelemetry/context/OtelInBraveTest.java +++ b/context/src/otelInBraveTest/java/io/opentelemetry/context/OtelInBraveTest.java @@ -28,6 +28,7 @@ class OtelInBraveTest { BraveContextStorageProvider.toBraveContext( TraceContext.newBuilder().traceId(1).spanId(1).build(), CONTEXT_WITH_ANIMAL); + @SuppressWarnings("NonFinalStaticField") private static ExecutorService otherThread; @BeforeAll diff --git a/context/src/otelInGrpcTest/java/io/opentelemetry/context/OtelInGrpcTest.java b/context/src/otelInGrpcTest/java/io/opentelemetry/context/OtelInGrpcTest.java index 6b5cf16dabb..6024100b986 100644 --- a/context/src/otelInGrpcTest/java/io/opentelemetry/context/OtelInGrpcTest.java +++ b/context/src/otelInGrpcTest/java/io/opentelemetry/context/OtelInGrpcTest.java @@ -21,6 +21,7 @@ class OtelInGrpcTest { private static final io.grpc.Context.Key FOOD = io.grpc.Context.key("food"); private static final io.grpc.Context.Key COUNTRY = io.grpc.Context.key("country"); + @SuppressWarnings("NonFinalStaticField") private static ExecutorService otherThread; @BeforeAll diff --git a/context/src/test/java/io/opentelemetry/context/ContextTest.java b/context/src/test/java/io/opentelemetry/context/ContextTest.java index d8a7a0b7bf8..3753e89bf30 100644 --- a/context/src/test/java/io/opentelemetry/context/ContextTest.java +++ b/context/src/test/java/io/opentelemetry/context/ContextTest.java @@ -9,6 +9,9 @@ import static org.awaitility.Awaitility.await; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; @@ -28,6 +31,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.atomic.LongAdder; import 
java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Consumer; @@ -118,7 +122,7 @@ void newThreadStartsWithRoot() throws Exception { } @Test - public void closingScopeWhenNotActiveIsNoopAndLogged() { + void closingScopeWhenNotActiveIsNoopAndLogged() { Context initial = Context.current(); Context context = initial.with(ANIMAL, "cat"); try (Scope scope = context.makeCurrent()) { @@ -137,7 +141,7 @@ public void closingScopeWhenNotActiveIsNoopAndLogged() { @SuppressWarnings("MustBeClosedChecker") @Test - public void closeScopeIsIdempotent() { + void closeScopeIsIdempotent() { Context initial = Context.current(); Context context1 = Context.root().with(ANIMAL, "cat"); Scope scope1 = context1.makeCurrent(); @@ -188,11 +192,10 @@ void withValues() { assertThat(context5).isSameAs(context4); String dog = new String("dog"); - assertThat(dog).isEqualTo("dog"); - assertThat(dog).isNotSameAs("dog"); + assertThat(dog).isEqualTo("dog").isNotSameAs("dog"); Context context6 = context5.with(ANIMAL, dog); assertThat(context6.get(ANIMAL)).isEqualTo("dog"); - // We reuse context object when values match by reference, not value. + // We reuse the context object when values match by reference, not value. assertThat(context6).isNotSameAs(context5); } @@ -234,7 +237,7 @@ void wrapCallable() throws Exception { void wrapFunction() { AtomicReference value = new AtomicReference<>(); Function callback = - (a) -> { + a -> { value.set(Context.current().get(ANIMAL)); return "foo"; }; @@ -273,7 +276,7 @@ void wrapConsumer() { AtomicReference value = new AtomicReference<>(); AtomicBoolean consumed = new AtomicBoolean(); Consumer callback = - (a) -> { + a -> { value.set(Context.current().get(ANIMAL)); consumed.set(true); }; @@ -362,7 +365,7 @@ void wrapExecutor() { @TestInstance(Lifecycle.PER_CLASS) class WrapExecutorService { - protected ScheduledExecutorService executor; + protected ExecutorService executor; protected ExecutorService wrapped; protected AtomicReference value; @@ -501,6 +504,204 @@ void invokeAnyTimeout() throws Exception { } } + @Nested + @TestInstance(Lifecycle.PER_CLASS) + class WrapScheduledExecutorService { + + protected ScheduledExecutorService executor; + protected ScheduledExecutorService wrapped; + protected AtomicReference value; + + protected ScheduledExecutorService wrap(ScheduledExecutorService executorService) { + return CAT.wrap(executorService); + } + + @BeforeAll + void initExecutor() { + executor = Executors.newSingleThreadScheduledExecutor(); + wrapped = wrap(executor); + } + + @AfterAll + void stopExecutor() { + executor.shutdown(); + } + + @BeforeEach + void setUp() { + value = new AtomicReference<>(); + } + + @Test + void execute() { + Runnable runnable = () -> value.set(Context.current().get(ANIMAL)); + wrapped.execute(runnable); + await().untilAsserted(() -> assertThat(value).hasValue("cat")); + } + + @Test + void submitRunnable() { + Runnable runnable = () -> value.set(Context.current().get(ANIMAL)); + Futures.getUnchecked(wrapped.submit(runnable)); + assertThat(value).hasValue("cat"); + } + + @Test + void submitRunnableResult() { + Runnable runnable = () -> value.set(Context.current().get(ANIMAL)); + assertThat(Futures.getUnchecked(wrapped.submit(runnable, "foo"))).isEqualTo("foo"); + assertThat(value).hasValue("cat"); + } + + @Test + void submitCallable() { + Callable callable = + () -> { + value.set(Context.current().get(ANIMAL)); + return "foo"; + }; + assertThat(Futures.getUnchecked(wrapped.submit(callable))).isEqualTo("foo"); + 
assertThat(value).hasValue("cat"); + } + + @Test + void invokeAll() throws Exception { + AtomicReference value1 = new AtomicReference<>(); + AtomicReference value2 = new AtomicReference<>(); + Callable callable1 = + () -> { + value1.set(Context.current().get(ANIMAL)); + return "foo"; + }; + Callable callable2 = + () -> { + value2.set(Context.current().get(ANIMAL)); + return "bar"; + }; + List> futures = wrapped.invokeAll(Arrays.asList(callable1, callable2)); + assertThat(futures.get(0).get()).isEqualTo("foo"); + assertThat(futures.get(1).get()).isEqualTo("bar"); + assertThat(value1).hasValue("cat"); + assertThat(value2).hasValue("cat"); + } + + @Test + void invokeAllTimeout() throws Exception { + AtomicReference value1 = new AtomicReference<>(); + AtomicReference value2 = new AtomicReference<>(); + Callable callable1 = + () -> { + value1.set(Context.current().get(ANIMAL)); + return "foo"; + }; + Callable callable2 = + () -> { + value2.set(Context.current().get(ANIMAL)); + return "bar"; + }; + List> futures = + wrapped.invokeAll(Arrays.asList(callable1, callable2), 10, TimeUnit.SECONDS); + assertThat(futures.get(0).get()).isEqualTo("foo"); + assertThat(futures.get(1).get()).isEqualTo("bar"); + assertThat(value1).hasValue("cat"); + assertThat(value2).hasValue("cat"); + } + + @Test + void invokeAny() throws Exception { + AtomicReference value1 = new AtomicReference<>(); + AtomicReference value2 = new AtomicReference<>(); + Callable callable1 = + () -> { + value1.set(Context.current().get(ANIMAL)); + throw new IllegalStateException("callable2 wins"); + }; + Callable callable2 = + () -> { + value2.set(Context.current().get(ANIMAL)); + return "bar"; + }; + assertThat(wrapped.invokeAny(Arrays.asList(callable1, callable2))).isEqualTo("bar"); + assertThat(value1).hasValue("cat"); + assertThat(value2).hasValue("cat"); + } + + @Test + void invokeAnyTimeout() throws Exception { + AtomicReference value1 = new AtomicReference<>(); + AtomicReference value2 = new AtomicReference<>(); + Callable callable1 = + () -> { + value1.set(Context.current().get(ANIMAL)); + throw new IllegalStateException("callable2 wins"); + }; + Callable callable2 = + () -> { + value2.set(Context.current().get(ANIMAL)); + return "bar"; + }; + assertThat(wrapped.invokeAny(Arrays.asList(callable1, callable2), 10, TimeUnit.SECONDS)) + .isEqualTo("bar"); + assertThat(value1).hasValue("cat"); + assertThat(value2).hasValue("cat"); + } + + @Test + void scheduleRunnable() { + Runnable runnable = () -> value.set(Context.current().get(ANIMAL)); + assertThat(Futures.getUnchecked(wrapped.schedule(runnable, 1L, TimeUnit.MILLISECONDS))) + .isNull(); + assertThat(value).hasValue("cat"); + } + + @Test + void scheduleCallable() { + Callable callable = + () -> { + value.set(Context.current().get(ANIMAL)); + return "foo"; + }; + assertThat(Futures.getUnchecked(wrapped.schedule(callable, 1L, TimeUnit.MILLISECONDS))) + .isEqualTo("foo"); + assertThat(value).hasValue("cat"); + } + + @Test + void scheduleAtFixedRate() { + LongAdder longAdder = new LongAdder(); + Runnable runnable = longAdder::increment; + Future future = wrapped.scheduleAtFixedRate(runnable, 1L, 2L, TimeUnit.NANOSECONDS); + assertThat(future).isNotNull(); + await() + .await() + .untilAsserted( + () -> { + if (!future.isCancelled()) { + future.cancel(true); + } + assertThat(longAdder.intValue()).isGreaterThan(1); + }); + assertThat(longAdder.intValue()).isGreaterThan(1); + } + + @Test + void scheduleWithFixedDelay() { + LongAdder longAdder = new LongAdder(); + Runnable runnable = 
longAdder::increment; + Future future = wrapped.scheduleWithFixedDelay(runnable, 1L, 2L, TimeUnit.NANOSECONDS); + assertThat(future).isNotNull(); + await() + .await() + .untilAsserted( + () -> { + if (!future.isCancelled()) { + future.cancel(true); + } + assertThat(longAdder.intValue()).isGreaterThan(1); + }); + } + } + @Nested @TestInstance(Lifecycle.PER_CLASS) class CurrentContextWrappingExecutorService extends WrapExecutorService { @@ -525,9 +726,34 @@ void close() { } } + @Nested + @TestInstance(Lifecycle.PER_CLASS) + class CurrentContextWrappingScheduledExecutorService extends WrapScheduledExecutorService { + + @Override + protected ScheduledExecutorService wrap(ScheduledExecutorService executorService) { + return Context.taskWrapping(executorService); + } + + private Scope scope; + + @BeforeEach + // Closed in AfterEach + @SuppressWarnings("MustBeClosedChecker") + void makeCurrent() { + scope = CAT.makeCurrent(); + } + + @AfterEach + void close() { + scope.close(); + scope = null; + } + } + @Test void keyToString() { - assertThat(ANIMAL.toString()).isEqualTo("animal"); + assertThat(ANIMAL).hasToString("animal"); } @Test @@ -552,6 +778,7 @@ class DelegatesToExecutorService { @Test void delegatesCleanupMethods() throws Exception { ExecutorService wrapped = CAT.wrap(executor); + doNothing().when(executor).shutdown(); wrapped.shutdown(); verify(executor).shutdown(); verifyNoMoreInteractions(executor); @@ -573,57 +800,74 @@ void delegatesCleanupMethods() throws Exception { } } + // We test real context-related behavior above, but should also verify that cleanup gets delegated, which is best + // done with a mock. @Nested @TestInstance(Lifecycle.PER_CLASS) - class WrapScheduledExecutorService extends WrapExecutorService { + @SuppressWarnings("MockitoDoSetup") + class DelegatesToScheduledExecutorService { - private ScheduledExecutorService wrapScheduled; - - @BeforeEach - void wrapScheduled() { - wrapScheduled = CAT.wrap(executor); - } + @Mock private ScheduledExecutorService executor; + @Mock private ScheduledFuture scheduledFuture; @Test - void scheduleRunnable() throws Exception { - Runnable runnable = () -> value.set(Context.current().get(ANIMAL)); - wrapScheduled.schedule(runnable, 0, TimeUnit.SECONDS).get(); - assertThat(value).hasValue("cat"); - } + void delegatesCleanupMethods() throws Exception { + ScheduledExecutorService wrapped = CAT.wrap(executor); - @Test - void scheduleCallable() throws Exception { - Callable callable = - () -> { - value.set(Context.current().get(ANIMAL)); - return "foo"; - }; - assertThat(wrapScheduled.schedule(callable, 0, TimeUnit.SECONDS).get()).isEqualTo("foo"); - assertThat(value).hasValue("cat"); - } + wrapped.shutdown(); + verify(executor).shutdown(); + verifyNoMoreInteractions(executor); - @Test - void scheduleAtFixedRate() { - Runnable runnable = () -> value.set(Context.current().get(ANIMAL)); - ScheduledFuture future = - wrapScheduled.scheduleAtFixedRate(runnable, 0, 10, TimeUnit.SECONDS); - await().untilAsserted(() -> assertThat(value).hasValue("cat")); - future.cancel(true); - } + wrapped.shutdownNow(); + verify(executor).shutdownNow(); + verifyNoMoreInteractions(executor); - @Test - void scheduleWithFixedDelay() { - Runnable runnable = () -> value.set(Context.current().get(ANIMAL)); - ScheduledFuture future = - wrapScheduled.scheduleWithFixedDelay(runnable, 0, 10, TimeUnit.SECONDS); - await().untilAsserted(() -> assertThat(value).hasValue("cat")); - future.cancel(true) + when(executor.isShutdown()).thenReturn(true); + assertThat(wrapped.isShutdown()).isTrue(); +
verify(executor).isShutdown(); + verifyNoMoreInteractions(executor); + + when(wrapped.isTerminated()).thenReturn(true); + assertThat(wrapped.isTerminated()).isTrue(); + verify(executor).isTerminated(); + verifyNoMoreInteractions(executor); + + when(executor.awaitTermination(anyLong(), any())).thenReturn(true); + assertThat(wrapped.awaitTermination(1L, TimeUnit.SECONDS)).isTrue(); + verify(executor).awaitTermination(1L, TimeUnit.SECONDS); + verifyNoMoreInteractions(executor); + + doReturn(scheduledFuture) + .when(executor) + .schedule(any(Runnable.class), anyLong(), any(TimeUnit.class)); + assertThat((Future) wrapped.schedule(() -> {}, 1L, TimeUnit.SECONDS)) + .isSameAs(scheduledFuture); + verify(executor).schedule(any(Runnable.class), anyLong(), any(TimeUnit.class)); + verifyNoMoreInteractions(executor); + + doReturn(scheduledFuture) + .when(executor) + .scheduleAtFixedRate(any(Runnable.class), anyLong(), anyLong(), any(TimeUnit.class)); + assertThat((Future) wrapped.scheduleAtFixedRate(() -> {}, 1L, 1L, TimeUnit.SECONDS)) + .isSameAs(scheduledFuture); + verify(executor) + .scheduleAtFixedRate(any(Runnable.class), anyLong(), anyLong(), any(TimeUnit.class)); + verifyNoMoreInteractions(executor); + + doReturn(scheduledFuture) + .when(executor) + .scheduleWithFixedDelay(any(Runnable.class), anyLong(), anyLong(), any(TimeUnit.class)); + assertThat((Future) wrapped.scheduleWithFixedDelay(() -> {}, 1L, 1L, TimeUnit.SECONDS)) + .isSameAs(scheduledFuture); + verify(executor) + .scheduleWithFixedDelay(any(Runnable.class), anyLong(), anyLong(), any(TimeUnit.class)); + verifyNoMoreInteractions(executor); } } @Test void emptyContext() { - assertThat(Context.root().get(new HashCollidingKey())).isEqualTo(null); + assertThat(Context.root().get(new HashCollidingKey())).isNull(); } @Test @@ -646,6 +890,20 @@ void hashcodeCollidingKeys() { assertThat(twoKeys.get(cheese)).isEqualTo("whiz"); } + @Test + void doNotWrapExecutorService() { + ExecutorService executor = mock(CurrentContextExecutorService.class); + ExecutorService wrapped = Context.taskWrapping(executor); + assertThat(wrapped).isSameAs(executor); + } + + @Test + void doNotWrapScheduledExecutorService() { + ScheduledExecutorService executor = mock(CurrentContextScheduledExecutorService.class); + ScheduledExecutorService wrapped = Context.taskWrapping(executor); + assertThat(wrapped).isSameAs(executor); + } + @SuppressWarnings("HashCodeToString") private static class HashCollidingKey implements ContextKey { @Override diff --git a/context/src/test/java/io/opentelemetry/context/internal/shaded/WeakConcurrentMapTest.java b/context/src/test/java/io/opentelemetry/context/internal/shaded/WeakConcurrentMapTest.java index 3d635be779c..6adfb18821f 100644 --- a/context/src/test/java/io/opentelemetry/context/internal/shaded/WeakConcurrentMapTest.java +++ b/context/src/test/java/io/opentelemetry/context/internal/shaded/WeakConcurrentMapTest.java @@ -25,6 +25,7 @@ package io.opentelemetry.context.internal.shaded; +import static org.awaitility.Awaitility.await; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; @@ -75,8 +76,7 @@ void testInternalThread() throws Exception { assertThat(map.getCleanerThread(), not(nullValue(Thread.class))); new MapTestCase(map).doTest(); map.getCleanerThread().interrupt(); - Thread.sleep(200L); - assertThat(map.getCleanerThread().isAlive(), is(false)); + await().untilAsserted(() -> assertThat(map.getCleanerThread().isAlive(), is(false))); } 
static class KeyEqualToWeakRefOfItself { @@ -152,8 +152,12 @@ void doTest() throws Exception { assertThat(values.isEmpty(), is(true)); key1 = key2 = null; // Make eligible for GC System.gc(); - Thread.sleep(200L); - triggerClean(); + await() + .untilAsserted( + () -> { + triggerClean(); + assertThat(map.approximateSize(), is(2)); + }); assertThat(map.get(key3), is(value3)); assertThat(map.getIfPresent(key3), is(value3)); assertThat(map.get(key4), is(value4)); diff --git a/context/src/test/java/io/opentelemetry/context/propagation/internal/ExtendedTextMapGetterTest.java b/context/src/test/java/io/opentelemetry/context/propagation/internal/ExtendedTextMapGetterTest.java new file mode 100644 index 00000000000..f8c35ea2ed9 --- /dev/null +++ b/context/src/test/java/io/opentelemetry/context/propagation/internal/ExtendedTextMapGetterTest.java @@ -0,0 +1,70 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.context.propagation.internal; + +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; + +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import javax.annotation.Nullable; +import org.junit.jupiter.api.Test; + +class ExtendedTextMapGetterTest { + + final ExtendedTextMapGetter nullGet = + new ExtendedTextMapGetter() { + @Override + public Iterable keys(Void carrier) { + return ImmutableList.of("key"); + } + + @Nullable + @Override + public String get(@Nullable Void carrier, String key) { + return null; + } + }; + + final ExtendedTextMapGetter nonNullGet = + new ExtendedTextMapGetter() { + @Override + public Iterable keys(Void carrier) { + return ImmutableList.of("key"); + } + + @Override + public String get(@Nullable Void carrier, String key) { + return "123"; + } + }; + + @Test + void extendedTextMapGetterdefaultMethod_returnsEmpty() { + Iterator result = nullGet.getAll(null, "key"); + assertThat(result).isNotNull(); + List values = iterToList(result); + assertThat(values).isEqualTo(Collections.emptyList()); + } + + @Test + void extendedTextMapGetterdefaultMethod_returnsSingleVal() { + Iterator result = nonNullGet.getAll(null, "key"); + assertThat(result).isNotNull(); + List values = iterToList(result); + assertThat(values).isEqualTo(Collections.singletonList("123")); + } + + private static List iterToList(Iterator iter) { + List list = new ArrayList<>(); + while (iter.hasNext()) { + list.add(iter.next()); + } + return list; + } +} diff --git a/custom-checks/build.gradle.kts b/custom-checks/build.gradle.kts new file mode 100644 index 00000000000..22ce0614b30 --- /dev/null +++ b/custom-checks/build.gradle.kts @@ -0,0 +1,87 @@ +plugins { + id("otel.java-conventions") +} + +dependencies { + compileOnly("com.google.errorprone:error_prone_core") + + testImplementation("com.google.errorprone:error_prone_test_helpers") +} + +otelJava.moduleName.set("io.opentelemetry.javaagent.customchecks") + +// We cannot use "--release" javac option here because that will forbid exporting com.sun.tools package. +// We also can't seem to use the toolchain without the "--release" option. So disable everything. + +java { + sourceCompatibility = JavaVersion.VERSION_17 + targetCompatibility = JavaVersion.VERSION_17 + toolchain { + languageVersion.set(null as JavaLanguageVersion?) + } +} + +tasks { + withType().configureEach { + with(options) { + release.set(null as Int?) 
+ + compilerArgs.addAll( + listOf( + "--add-exports", + "jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED", + "--add-exports", + "jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED", + "--add-exports", + "jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED", + "--add-exports", + "jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED", + "--add-exports", + "jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED", + ), + ) + } + } + + // only test on java 17+ + val testJavaVersion: String? by project + if (testJavaVersion != null && Integer.valueOf(testJavaVersion) < 17) { + test { + enabled = false + } + } +} + +tasks.withType().configureEach { + // required on jdk17 + jvmArgs("--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED") + jvmArgs("--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED") + jvmArgs("--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED") + jvmArgs("--add-opens=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED") + jvmArgs("--add-opens=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED") + jvmArgs("--add-opens=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED") + jvmArgs("--add-opens=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED") + jvmArgs("--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED") + jvmArgs("-XX:+IgnoreUnrecognizedVMOptions") +} + +tasks.withType().configureEach { + // using com.sun.tools.javac.api.JavacTrees breaks javadoc generation + enabled = false +} + +// Our conventions apply this project as a dependency in the errorprone configuration, which would cause +// a circular dependency if trying to compile this project with that still there. So we filter this +// project out. +configurations { + named("errorprone") { + dependencies.removeIf { + it is ProjectDependency && it.dependencyProject == project + } + } +} + +// Skip OWASP dependencyCheck task on test module +dependencyCheck { + skip = true +} diff --git a/custom-checks/src/main/java/io/opentelemetry/gradle/customchecks/OtelInternalJavadoc.java b/custom-checks/src/main/java/io/opentelemetry/gradle/customchecks/OtelInternalJavadoc.java new file mode 100644 index 00000000000..0cc97d45abc --- /dev/null +++ b/custom-checks/src/main/java/io/opentelemetry/gradle/customchecks/OtelInternalJavadoc.java @@ -0,0 +1,81 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.gradle.customchecks; + +import static com.google.errorprone.BugPattern.SeverityLevel.WARNING; + +import com.google.errorprone.BugPattern; +import com.google.errorprone.VisitorState; +import com.google.errorprone.bugpatterns.BugChecker; +import com.google.errorprone.matchers.Description; +import com.sun.source.doctree.DocCommentTree; +import com.sun.source.tree.ClassTree; +import com.sun.source.tree.PackageTree; +import com.sun.tools.javac.api.JavacTrees; +import java.util.regex.Pattern; +import javax.annotation.Nullable; +import javax.lang.model.element.Modifier; + +@BugPattern( + summary = + "This public internal class doesn't end with any of the applicable javadoc disclaimers: \"" + + OtelInternalJavadoc.EXPECTED_INTERNAL_COMMENT_V1 + + "\", or \"" + + OtelInternalJavadoc.EXPECTED_INTERNAL_COMMENT_V2 + + "\"", + severity = WARNING) +public class OtelInternalJavadoc extends BugChecker implements BugChecker.ClassTreeMatcher { + + private static final long serialVersionUID = 1L; + + private static final Pattern INTERNAL_PACKAGE_PATTERN = Pattern.compile("\\binternal\\b"); + + static final String EXPECTED_INTERNAL_COMMENT_V1 = + "This class is 
internal and is hence not for public use." + + " Its APIs are unstable and can change at any time."; + + static final String EXPECTED_INTERNAL_COMMENT_V2 = + "This class is internal and experimental. Its APIs are unstable and can change at any time." + + " Its APIs (or a version of them) may be promoted to the public stable API in the" + + " future, but no guarantees are made."; + + @Override + public Description matchClass(ClassTree tree, VisitorState state) { + if (!isPublic(tree) || !isInternal(state) || tree.getSimpleName().toString().endsWith("Test")) { + return Description.NO_MATCH; + } + String javadoc = getJavadoc(state); + if (javadoc != null + && (javadoc.contains(EXPECTED_INTERNAL_COMMENT_V1) + || javadoc.contains(EXPECTED_INTERNAL_COMMENT_V2))) { + return Description.NO_MATCH; + } + return describeMatch(tree); + } + + private static boolean isPublic(ClassTree tree) { + return tree.getModifiers().getFlags().contains(Modifier.PUBLIC); + } + + private static boolean isInternal(VisitorState state) { + PackageTree packageTree = state.getPath().getCompilationUnit().getPackage(); + if (packageTree == null) { + return false; + } + String packageName = state.getSourceForNode(packageTree.getPackageName()); + return packageName != null && INTERNAL_PACKAGE_PATTERN.matcher(packageName).find(); + } + + @Nullable + private static String getJavadoc(VisitorState state) { + DocCommentTree docCommentTree = + JavacTrees.instance(state.context).getDocCommentTree(state.getPath()); + if (docCommentTree == null) { + return null; + } + return docCommentTree.toString().replace("\n", ""); + } +} diff --git a/custom-checks/src/main/java/io/opentelemetry/gradle/customchecks/OtelPrivateConstructorForUtilityClass.java b/custom-checks/src/main/java/io/opentelemetry/gradle/customchecks/OtelPrivateConstructorForUtilityClass.java new file mode 100644 index 00000000000..5aff2f1db4c --- /dev/null +++ b/custom-checks/src/main/java/io/opentelemetry/gradle/customchecks/OtelPrivateConstructorForUtilityClass.java @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.gradle.customchecks; + +import static com.google.errorprone.BugPattern.SeverityLevel.WARNING; +import static com.google.errorprone.matchers.Description.NO_MATCH; + +import com.google.errorprone.BugPattern; +import com.google.errorprone.VisitorState; +import com.google.errorprone.bugpatterns.BugChecker; +import com.google.errorprone.bugpatterns.PrivateConstructorForUtilityClass; +import com.google.errorprone.matchers.Description; +import com.sun.source.tree.ClassTree; + +@BugPattern( + summary = + "Classes which are not intended to be instantiated should be made non-instantiable with a private constructor. 
This includes utility classes (classes with only static members), and the main class.", + severity = WARNING) +public class OtelPrivateConstructorForUtilityClass extends BugChecker + implements BugChecker.ClassTreeMatcher { + + private static final long serialVersionUID = 1L; + + private final PrivateConstructorForUtilityClass delegate = + new PrivateConstructorForUtilityClass(); + + @Override + public Description matchClass(ClassTree tree, VisitorState state) { + Description description = delegate.matchClass(tree, state); + if (description == NO_MATCH) { + return description; + } + return describeMatch(tree); + } +} diff --git a/custom-checks/src/main/resources/META-INF/services/com.google.errorprone.bugpatterns.BugChecker b/custom-checks/src/main/resources/META-INF/services/com.google.errorprone.bugpatterns.BugChecker new file mode 100644 index 00000000000..e73ad8bbe73 --- /dev/null +++ b/custom-checks/src/main/resources/META-INF/services/com.google.errorprone.bugpatterns.BugChecker @@ -0,0 +1,2 @@ +io.opentelemetry.gradle.customchecks.OtelInternalJavadoc +io.opentelemetry.gradle.customchecks.OtelPrivateConstructorForUtilityClass diff --git a/custom-checks/src/test/java/io/opentelemetry/gradle/customchecks/OtelInternalJavadocTest.java b/custom-checks/src/test/java/io/opentelemetry/gradle/customchecks/OtelInternalJavadocTest.java new file mode 100644 index 00000000000..34fbc46b6b6 --- /dev/null +++ b/custom-checks/src/test/java/io/opentelemetry/gradle/customchecks/OtelInternalJavadocTest.java @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.gradle.customchecks; + +import com.google.errorprone.CompilationTestHelper; +import org.junit.jupiter.api.Test; + +class OtelInternalJavadocTest { + + @Test + void positiveCases() { + CompilationTestHelper.newInstance(OtelInternalJavadoc.class, OtelInternalJavadocTest.class) + .addSourceLines( + "internal/InternalJavadocPositiveCases.java", + "/*", + " * Copyright The OpenTelemetry Authors", + " * SPDX-License-Identifier: Apache-2.0", + " */", + "package io.opentelemetry.gradle.customchecks.internal;", + "// BUG: Diagnostic contains: doesn't end with any of the applicable javadoc disclaimers", + "public class InternalJavadocPositiveCases {", + " // BUG: Diagnostic contains: doesn't end with any of the applicable javadoc disclaimers", + " public static class One {}", + " /** Doesn't have the disclaimer. */", + " // BUG: Diagnostic contains: doesn't end with any of the applicable javadoc disclaimers", + " public static class Two {}", + "}") + .doTest(); + } + + @Test + void negativeCases() { + CompilationTestHelper.newInstance(OtelInternalJavadoc.class, OtelInternalJavadocTest.class) + .addSourceLines( + "internal/InternalJavadocNegativeCases.java", + "/*", + " * Copyright The OpenTelemetry Authors", + " * SPDX-License-Identifier: Apache-2.0", + " */", + "package io.opentelemetry.gradle.customchecks.internal;", + "/**", + " * This class is internal and is hence not for public use. Its APIs are unstable and can change at", + " * any time.", + " */", + "public class InternalJavadocNegativeCases {", + " /**", + " * This class is internal and is hence not for public use. 
Its APIs are unstable and can change at", + " * any time.", + " */", + " public static class One {}", + " static class Two {}", + "}") + .doTest(); + } +} diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index dc66b95b572..39b085f72f1 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -8,36 +8,51 @@ val dependencyVersions = hashMapOf() rootProject.extra["versions"] = dependencyVersions val DEPENDENCY_BOMS = listOf( - "com.fasterxml.jackson:jackson-bom:2.15.2", - "com.google.guava:guava-bom:32.1.2-jre", - "com.google.protobuf:protobuf-bom:3.24.3", - "com.linecorp.armeria:armeria-bom:1.25.2", - "com.squareup.okhttp3:okhttp-bom:4.11.0", - "com.squareup.okio:okio-bom:3.5.0", // applies to transitive dependencies of okhttp - "io.grpc:grpc-bom:1.58.0", - "io.netty:netty-bom:4.1.97.Final", - "io.zipkin.brave:brave-bom:5.16.0", - "io.zipkin.reporter2:zipkin-reporter-bom:2.16.4", - "org.assertj:assertj-bom:3.24.2", - "org.junit:junit-bom:5.10.0", - "org.testcontainers:testcontainers-bom:1.19.0", - "org.snakeyaml:snakeyaml-engine:2.7" + // for some reason boms show up as runtime dependencies in license and vulnerability scans + // even if they are only used by test dependencies, so not using junit bom here + // (which is EPL licensed) or armeria bom (which is Apache licensed but is getting flagged + // by FOSSA for containing EPL-licensed) + + "com.fasterxml.jackson:jackson-bom:2.18.3", + "com.google.guava:guava-bom:33.4.6-jre", + "com.google.protobuf:protobuf-bom:4.30.2", + "com.squareup.okhttp3:okhttp-bom:4.12.0", + "com.squareup.okio:okio-bom:3.10.2", // applies to transitive dependencies of okhttp + "io.grpc:grpc-bom:1.71.0", + "io.netty:netty-bom:4.2.0.Final", + "io.zipkin.brave:brave-bom:6.1.0", + "io.zipkin.reporter2:zipkin-reporter-bom:3.5.0", + "org.assertj:assertj-bom:3.27.3", + "org.testcontainers:testcontainers-bom:1.20.6", + "org.snakeyaml:snakeyaml-engine:2.9" ) -val autoValueVersion = "1.10.3" -val errorProneVersion = "2.21.1" +val autoValueVersion = "1.11.0" +val errorProneVersion = "2.37.0" val jmhVersion = "1.37" // Mockito 5.x.x requires Java 11 https://github.com/mockito/mockito/releases/tag/v5.0.0 val mockitoVersion = "4.11.0" -val slf4jVersion = "2.0.9" +val slf4jVersion = "2.0.17" val opencensusVersion = "0.31.1" val prometheusClientVersion = "0.16.0" +val prometheusServerVersion = "1.3.6" +val armeriaVersion = "1.32.3" +val junitVersion = "5.12.1" val DEPENDENCIES = listOf( + "org.junit.jupiter:junit-jupiter-api:${junitVersion}", + "org.junit.jupiter:junit-jupiter-params:${junitVersion}", + "org.junit.jupiter:junit-jupiter-pioneer:${junitVersion}", + "com.linecorp.armeria:armeria:${armeriaVersion}", + "com.linecorp.armeria:armeria-grpc:${armeriaVersion}", + "com.linecorp.armeria:armeria-grpc-protocol:${armeriaVersion}", + "com.linecorp.armeria:armeria-junit5:${armeriaVersion}", + "com.google.auto.value:auto-value:${autoValueVersion}", "com.google.auto.value:auto-value-annotations:${autoValueVersion}", "com.google.errorprone:error_prone_annotations:${errorProneVersion}", "com.google.errorprone:error_prone_core:${errorProneVersion}", + "com.google.errorprone:error_prone_test_helpers:${errorProneVersion}", "io.opencensus:opencensus-api:${opencensusVersion}", "io.opencensus:opencensus-impl-core:${opencensusVersion}", "io.opencensus:opencensus-impl:${opencensusVersion}", @@ -45,37 +60,44 @@ val DEPENDENCIES = listOf( "io.opencensus:opencensus-contrib-exemplar-util:${opencensusVersion}", 
"org.openjdk.jmh:jmh-core:${jmhVersion}", "org.openjdk.jmh:jmh-generator-bytecode:${jmhVersion}", + "org.openjdk.jmh:jmh-generator-annprocess:${jmhVersion}", "org.mockito:mockito-core:${mockitoVersion}", "org.mockito:mockito-junit-jupiter:${mockitoVersion}", "org.slf4j:slf4j-simple:${slf4jVersion}", "org.slf4j:jul-to-slf4j:${slf4jVersion}", + "io.prometheus:prometheus-metrics-shaded-protobuf:1.3.1", + "io.prometheus:prometheus-metrics-exporter-httpserver:${prometheusServerVersion}", + "io.prometheus:prometheus-metrics-exposition-formats:${prometheusServerVersion}", "io.prometheus:simpleclient:${prometheusClientVersion}", "io.prometheus:simpleclient_common:${prometheusClientVersion}", "io.prometheus:simpleclient_httpserver:${prometheusClientVersion}", "javax.annotation:javax.annotation-api:1.3.2", "com.github.stefanbirkner:system-rules:1.19.0", - "com.google.api.grpc:proto-google-common-protos:2.23.0", + "com.google.api.grpc:proto-google-common-protos:2.54.1", "com.google.code.findbugs:jsr305:3.0.2", "com.google.guava:guava-beta-checker:1.0", "com.sun.net.httpserver:http:20070405", - "com.tngtech.archunit:archunit-junit5:1.1.0", - "com.uber.nullaway:nullaway:0.10.14", + "com.tngtech.archunit:archunit-junit5:1.4.0", + "com.uber.nullaway:nullaway:0.12.6", "edu.berkeley.cs.jqf:jqf-fuzz:1.7", // jqf-fuzz version 1.8+ requires Java 11+ "eu.rekawek.toxiproxy:toxiproxy-java:2.1.7", "io.github.netmikey.logunit:logunit-jul:2.0.0", "io.jaegertracing:jaeger-client:1.8.1", - "io.opentelemetry.proto:opentelemetry-proto:1.0.0-alpha", - "io.opentelemetry.contrib:opentelemetry-aws-xray-propagator:1.29.0-alpha", + "io.opentelemetry.contrib:opentelemetry-aws-xray-propagator:1.45.0-alpha", + "io.opentelemetry.semconv:opentelemetry-semconv-incubating:1.32.0-alpha", + "io.opentelemetry.proto:opentelemetry-proto:1.5.0-alpha", "io.opentracing:opentracing-api:0.33.0", "io.opentracing:opentracing-noop:0.33.0", "junit:junit:4.13.2", - "nl.jqno.equalsverifier:equalsverifier:3.15.1", - "org.awaitility:awaitility:4.2.0", + "nl.jqno.equalsverifier:equalsverifier:3.19.3", + "org.awaitility:awaitility:4.3.0", "org.bouncycastle:bcpkix-jdk15on:1.70", - "org.codehaus.mojo:animal-sniffer-annotations:1.23", - "org.jctools:jctools-core:4.0.1", + "org.codehaus.mojo:animal-sniffer-annotations:1.24", + "org.jctools:jctools-core:4.0.5", "org.junit-pioneer:junit-pioneer:1.9.1", - "org.skyscreamer:jsonassert:1.5.1", + "org.mock-server:mockserver-netty:5.15.0:shaded", + "org.skyscreamer:jsonassert:1.5.3", + "com.android.tools:desugar_jdk_libs:2.1.5", ) javaPlatform { diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-jaeger-thrift.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-api.txt similarity index 100% rename from docs/apidiffs/current_vs_latest/opentelemetry-exporter-jaeger-thrift.txt rename to docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-api.txt diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-jaeger.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-context.txt similarity index 100% rename from docs/apidiffs/current_vs_latest/opentelemetry-exporter-jaeger.txt rename to docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-context.txt diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-jaeger-thrift.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-jaeger-thrift.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-jaeger-thrift.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-jaeger.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-jaeger.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-jaeger.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..ec040faaf8b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,10 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.logging.LoggingMetricExporter (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) java.lang.String toString() +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.logging.LoggingSpanExporter (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) java.lang.String toString() +*** MODIFIED CLASS: PUBLIC io.opentelemetry.exporter.logging.SystemOutLogRecordExporter (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) java.lang.String toString() diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..772e014e5a0 --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,4 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.zipkin.ZipkinSpanExporter (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) java.lang.String toString() diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..dff1c969ed8 --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,4 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.metrics.ViewBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.metrics.ViewBuilder setAttributeFilter(java.util.Set) diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk.txt b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.0_vs_1.29.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-api.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-api.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-context.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-jaeger-thrift.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-jaeger-thrift.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-jaeger-thrift.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-jaeger.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-jaeger.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-jaeger.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk.txt b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.30.1_vs_1.30.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-api.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-api.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-context.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-common.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-jaeger-thrift.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-jaeger-thrift.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-jaeger-thrift.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-jaeger.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-jaeger.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-jaeger.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-common.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..cc95503822e --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-common.txt @@ -0,0 +1,11 @@ +Comparing source compatibility of against ++++ NEW ENUM: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.common.export.MemoryMode (compatible) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. + +++ NEW INTERFACE: java.lang.constant.Constable + +++ NEW INTERFACE: java.lang.Comparable + +++ NEW INTERFACE: java.io.Serializable + +++ NEW SUPERCLASS: java.lang.Enum + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.sdk.common.export.MemoryMode REUSABLE_DATA + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.sdk.common.export.MemoryMode IMMUTABLE_DATA + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.common.export.MemoryMode valueOf(java.lang.String) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.common.export.MemoryMode[] values() diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..11010a0b08b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-logs.txt @@ -0,0 +1,4 @@ +Comparing source compatibility of against +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.logs.ReadWriteLogRecord (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.ReadWriteLogRecord setAllAttributes(io.opentelemetry.api.common.Attributes) diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..6cf519e2823 --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-metrics.txt @@ -0,0 +1,21 @@ +Comparing source compatibility of against +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.metrics.export.CollectionRegistration (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) java.util.Collection collectAllMetrics() + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.metrics.export.CollectionRegistration noop() +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.metrics.export.MetricExporter (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.export.MemoryMode getMemoryMode() ++++ NEW INTERFACE: PUBLIC(+) ABSTRACT(+) io.opentelemetry.sdk.metrics.export.MetricProducer (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. + +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) java.util.Collection produce(io.opentelemetry.sdk.resources.Resource) +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.metrics.export.MetricReader (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.export.MemoryMode getMemoryMode() +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.metrics.export.PeriodicMetricReader (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.export.MemoryMode getMemoryMode() +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder registerMetricProducer(io.opentelemetry.sdk.metrics.export.MetricProducer) diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..24ed11618d8 --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-testing.txt @@ -0,0 +1,12 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.testing.exporter.InMemoryMetricReaderBuilder builder() + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.export.MemoryMode getMemoryMode() ++++ NEW CLASS: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.testing.exporter.InMemoryMetricReaderBuilder (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. 
+ +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader build() + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.exporter.InMemoryMetricReaderBuilder setAggregationTemporalitySelector(io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.exporter.InMemoryMetricReaderBuilder setDefaultAggregationSelector(io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.exporter.InMemoryMetricReaderBuilder setMemoryMode(io.opentelemetry.sdk.common.export.MemoryMode) diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk.txt b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.31.0_vs_1.30.1/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-api.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-api.txt new file mode 100644 index 00000000000..faa8d3cf893 --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-api.txt @@ -0,0 +1,7 @@ +Comparing source compatibility of against +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.api.metrics.DoubleHistogramBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.metrics.DoubleHistogramBuilder setExplicitBucketBoundariesAdvice(java.util.List) +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.api.metrics.LongHistogramBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.metrics.LongHistogramBuilder setExplicitBucketBoundariesAdvice(java.util.List) diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-context.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-jaeger-thrift.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-jaeger-thrift.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-jaeger-thrift.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
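
The opentelemetry-sdk-metrics and opentelemetry-sdk-testing diffs above add a `MetricProducer` registration hook on `SdkMeterProviderBuilder` and a builder for `InMemoryMetricReader`. A minimal sketch wiring the two together; the producer body is a placeholder and the variable names are illustrative, not taken from the diff:

```java
import java.util.Collections;
import io.opentelemetry.sdk.common.export.MemoryMode;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.export.MetricProducer;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;

// A producer bridging metrics collected outside the SDK; here it simply returns nothing.
MetricProducer externalMetrics = resource -> Collections.emptyList();

// The new builder lets tests opt into the REUSABLE_DATA memory mode.
InMemoryMetricReader reader =
    InMemoryMetricReader.builder().setMemoryMode(MemoryMode.REUSABLE_DATA).build();

SdkMeterProvider meterProvider =
    SdkMeterProvider.builder()
        .registerMetricProducer(externalMetrics)
        .registerMetricReader(reader)
        .build();
```

The opentelemetry-sdk-logs entry above similarly adds `ReadWriteLogRecord.setAllAttributes(Attributes)` as a bulk counterpart to `setAttribute`.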
\ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-jaeger.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-jaeger.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-jaeger.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..afee563c67e --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,13 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder setMeterProvider(java.util.function.Supplier) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder setMeterProvider(java.util.function.Supplier) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder setMeterProvider(java.util.function.Supplier) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder setMeterProvider(java.util.function.Supplier) diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source 
compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
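
The 1.32.0 opentelemetry-exporter-otlp diff above adds `setMeterProvider(Supplier<MeterProvider>)` overloads to the OTLP builders, deferring resolution of the provider used for the exporter's own metrics until it is needed. A minimal sketch, assuming the global OpenTelemetry instance is configured elsewhere:

```java
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;

OtlpGrpcSpanExporter exporter =
    OtlpGrpcSpanExporter.builder()
        .setEndpoint("http://localhost:4317")
        // Resolved lazily, so the exporter can be built before the SDK that provides the meters.
        .setMeterProvider(GlobalOpenTelemetry::getMeterProvider)
        .build();
```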
\ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..e8f3d45d4f7 --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,9 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) void clearLogRecords() + +++ NEW METHOD: PUBLIC(+) java.util.List getLogRecords() +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) void clearLogRecords() + +++ NEW METHOD: PUBLIC(+) java.util.List getLogRecords() diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk.txt b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.32.0_vs_1.31.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-api.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-api.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-context.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
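
The 1.32.0 opentelemetry-sdk-testing diff above exposes captured log records on the JUnit 4 rule and JUnit 5 extension. A minimal JUnit 5 sketch; the logger name and body are illustrative:

```java
import static org.assertj.core.api.Assertions.assertThat;

import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

class LogCaptureTest {
  @RegisterExtension
  static final OpenTelemetryExtension otelTesting = OpenTelemetryExtension.create();

  @Test
  void capturesEmittedLogRecords() {
    otelTesting
        .getOpenTelemetry()
        .getLogsBridge()
        .get("test-logger")
        .logRecordBuilder()
        .setBody("hello")
        .emit();

    // New in this release: inspect and reset the captured log records.
    assertThat(otelTesting.getLogRecords()).hasSize(1);
    otelTesting.clearLogRecords();
    assertThat(otelTesting.getLogRecords()).isEmpty();
  }
}
```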
\ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-jaeger-thrift.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-jaeger-thrift.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-jaeger-thrift.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-jaeger.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-jaeger.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-jaeger.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..7c0ed9b5e5f --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,25 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder setConnectTimeout(long, java.util.concurrent.TimeUnit) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder setConnectTimeout(java.time.Duration) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder setHeaders(java.util.function.Supplier>) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder setConnectTimeout(long, java.util.concurrent.TimeUnit) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder setConnectTimeout(java.time.Duration) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder setHeaders(java.util.function.Supplier>) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder setConnectTimeout(long, java.util.concurrent.TimeUnit) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder setConnectTimeout(java.time.Duration) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder setHeaders(java.util.function.Supplier>) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder setHeaders(java.util.function.Supplier>) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder setHeaders(java.util.function.Supplier>) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder setHeaders(java.util.function.Supplier>) diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
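
The 1.33.0 opentelemetry-exporter-otlp diff above adds `setConnectTimeout(...)` to the HTTP builders and Supplier-based `setHeaders(...)` overloads across the OTLP builders, so headers such as short-lived auth tokens can be recomputed per request. A minimal sketch; `readToken()` stands in for whatever token source an application uses and is not part of this API:

```java
import java.time.Duration;
import java.util.Collections;
import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter;

OtlpHttpSpanExporter exporter =
    OtlpHttpSpanExporter.builder()
        .setEndpoint("http://localhost:4318/v1/traces")
        .setConnectTimeout(Duration.ofSeconds(5))
        // The supplier runs when headers are needed, so rotating credentials stay current.
        // readToken() is a hypothetical helper returning the current token.
        .setHeaders(() -> Collections.singletonMap("Authorization", "Bearer " + readToken()))
        .build();
```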
\ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..1945e8c996d --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,9 @@ +Comparing source compatibility of against +=== UNCHANGED CLASS: PUBLIC FINAL io.opentelemetry.extension.kotlin.ContextExtensionsKt (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED ANNOTATION: kotlin.Metadata + === UNCHANGED ELEMENT: xi=48 + *** MODIFIED ELEMENT: mv=1,6,0 (<- 1,9,0) + === UNCHANGED ELEMENT: k=2 + === UNCHANGED ELEMENT: d1=�� � ��� ��� ��� ���� ����0�*�0�� ����0�*�0�� ����0�*�0�¨�� + === UNCHANGED ELEMENT: d2=asContextElement,Lkotlin/coroutines/CoroutineContext;,Lio/opentelemetry/context/Context;,Lio/opentelemetry/context/ImplicitContextKeyed;,getOpenTelemetryContext,opentelemetry-extension-kotlin diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..9f4a3fbb37a --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,5 @@ +Comparing source compatibility of against +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer addLogRecordProcessorCustomizer(java.util.function.BiFunction) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer addSpanProcessorCustomizer(java.util.function.BiFunction) diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..c4389093f93 --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,5 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdkBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer addLogRecordProcessorCustomizer(java.util.function.BiFunction) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdkBuilder addSpanProcessorCustomizer(java.util.function.BiFunction) diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
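
The 1.33.0 autoconfigure diffs above add `addSpanProcessorCustomizer` and `addLogRecordProcessorCustomizer` hooks. A minimal sketch of an SPI customizer that leaves both processors unchanged (a real implementation would typically wrap or replace them); registration through `META-INF/services` is assumed:

```java
import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer;
import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider;

public final class ProcessorCustomizerProvider implements AutoConfigurationCustomizerProvider {
  @Override
  public void customize(AutoConfigurationCustomizer customizer) {
    customizer
        // BiFunction<SpanProcessor, ConfigProperties, SpanProcessor>
        .addSpanProcessorCustomizer((spanProcessor, config) -> spanProcessor)
        // BiFunction<LogRecordProcessor, ConfigProperties, LogRecordProcessor>
        .addLogRecordProcessorCustomizer((logRecordProcessor, config) -> logRecordProcessor);
  }
}
```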
\ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..c1e98158b3d --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,5 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC ABSTRACT io.opentelemetry.sdk.testing.assertj.AbstractPointAssert (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + GENERIC TEMPLATES: === PointAssertT:io.opentelemetry.sdk.testing.assertj.AbstractPointAssert, === PointT:io.opentelemetry.sdk.metrics.data.PointData + +++ NEW METHOD: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.testing.assertj.AbstractPointAssert hasAttributesSatisfying(java.util.function.Consumer) diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk.txt b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.33.0_vs_1.32.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-api.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-api.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-context.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-jaeger-thrift.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-jaeger-thrift.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-jaeger-thrift.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-jaeger.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-jaeger.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-jaeger.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
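
The 1.33.0 opentelemetry-sdk-testing diff above adds a Consumer-based `hasAttributesSatisfying` overload to `AbstractPointAssert`. A minimal sketch, assuming a `MetricData` instance named `metricData` whose long-sum point carries an illustrative `route` attribute:

```java
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;

import io.opentelemetry.api.common.AttributeKey;
import org.assertj.core.api.Assertions;

assertThat(metricData)
    .hasLongSumSatisfying(
        sum ->
            sum.hasPointsSatisfying(
                point ->
                    // New overload: assert on the full Attributes instance via a Consumer.
                    point.hasAttributesSatisfying(
                        attributes ->
                            Assertions.assertThat(attributes.get(AttributeKey.stringKey("route")))
                                .isEqualTo("/items"))));
```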
\ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..f03b01723c1 --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,12 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.trace.export.BatchSpanProcessorBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.trace.export.BatchSpanProcessorBuilder setExportUnsampledSpans(boolean) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.trace.export.SimpleSpanProcessor (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.trace.export.SimpleSpanProcessorBuilder builder(io.opentelemetry.sdk.trace.export.SpanExporter) ++++ NEW CLASS: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.trace.export.SimpleSpanProcessorBuilder (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. + +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.trace.export.SimpleSpanProcessor build() + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.trace.export.SimpleSpanProcessorBuilder setExportUnsampledSpans(boolean) diff --git a/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk.txt b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.0_vs_1.33.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-api.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-api.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-context.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-jaeger-thrift.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-jaeger-thrift.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-jaeger-thrift.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
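
The 1.34.0 opentelemetry-sdk-trace diff above adds `setExportUnsampledSpans(boolean)` to `BatchSpanProcessorBuilder` and introduces a builder for `SimpleSpanProcessor` with the same option. A minimal sketch; the OTLP gRPC exporter is used only as a stand-in for any `SpanExporter`:

```java
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.trace.SpanProcessor;
import io.opentelemetry.sdk.trace.export.BatchSpanProcessor;
import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor;
import io.opentelemetry.sdk.trace.export.SpanExporter;

SpanExporter exporter = OtlpGrpcSpanExporter.getDefault();

// Batch processor that also hands unsampled (recorded-but-not-sampled) spans to the exporter.
SpanProcessor batch =
    BatchSpanProcessor.builder(exporter).setExportUnsampledSpans(true).build();

// The new SimpleSpanProcessor builder offers the same switch.
SpanProcessor simple =
    SimpleSpanProcessor.builder(exporter).setExportUnsampledSpans(true).build();
```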
\ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-jaeger.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-jaeger.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-jaeger.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk.txt b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.34.1_vs_1.34.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-api.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-api.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-context.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-common.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..479860c48b0 --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,9 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.zipkin.ZipkinSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + === UNCHANGED METHOD: PUBLIC io.opentelemetry.exporter.zipkin.ZipkinSpanExporterBuilder setEncoder(zipkin2.codec.BytesEncoder) + +++ NEW ANNOTATION: java.lang.Deprecated + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.zipkin.ZipkinSpanExporterBuilder setEncoder(zipkin2.reporter.BytesEncoder) + === UNCHANGED METHOD: PUBLIC io.opentelemetry.exporter.zipkin.ZipkinSpanExporterBuilder setSender(zipkin2.reporter.Sender) + +++ NEW ANNOTATION: java.lang.Deprecated + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.zipkin.ZipkinSpanExporterBuilder setSender(zipkin2.reporter.BytesMessageSender) diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
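
The 1.35.0 opentelemetry-exporter-zipkin diff above deprecates the `zipkin2.codec.BytesEncoder` and `zipkin2.reporter.Sender` overloads in favor of `zipkin2.reporter.BytesEncoder` and `zipkin2.reporter.BytesMessageSender`. A minimal sketch of the replacement types, assuming zipkin-reporter 3.x with the `zipkin-sender-urlconnection` artifact on the classpath; the endpoint is illustrative:

```java
import io.opentelemetry.exporter.zipkin.ZipkinSpanExporter;
import zipkin2.reporter.SpanBytesEncoder;
import zipkin2.reporter.urlconnection.URLConnectionSender;

ZipkinSpanExporter exporter =
    ZipkinSpanExporter.builder()
        // URLConnectionSender satisfies the new zipkin2.reporter.BytesMessageSender contract.
        .setSender(URLConnectionSender.create("http://localhost:9411/api/v2/spans"))
        // zipkin2.reporter.SpanBytesEncoder replaces the deprecated zipkin2.codec variant.
        .setEncoder(SpanBytesEncoder.JSON_V2)
        .build();
```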
\ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-common.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk.txt b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.35.0_vs_1.34.1/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-api.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-api.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-context.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..36d0f38ffc9 --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,22 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder setProxyOptions(io.opentelemetry.sdk.common.export.ProxyOptions) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder setProxyOptions(io.opentelemetry.sdk.common.export.ProxyOptions) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder setProxy(io.opentelemetry.sdk.common.export.ProxyOptions) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder setConnectTimeout(long, java.util.concurrent.TimeUnit) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder setConnectTimeout(java.time.Duration) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder setConnectTimeout(long, java.util.concurrent.TimeUnit) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder setConnectTimeout(java.time.Duration) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder setConnectTimeout(long, java.util.concurrent.TimeUnit) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder setConnectTimeout(java.time.Duration) diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..80cfb0c6678 --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-common.txt @@ -0,0 +1,8 @@ +Comparing source compatibility of against ++++ NEW CLASS: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.common.export.ProxyOptions (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. 
+ +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.common.export.ProxyOptions create(java.net.ProxySelector) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.common.export.ProxyOptions create(java.net.InetSocketAddress) + +++ NEW METHOD: PUBLIC(+) java.net.ProxySelector getProxySelector() + +++ NEW METHOD: PUBLIC(+) java.lang.String toString() diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..c464bbdd2e2 --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,4 @@ +Comparing source compatibility of against +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer addMetricReaderCustomizer(java.util.function.BiFunction) diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..f9bf6463d2c --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,5 @@ +Comparing source compatibility of against +**** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdkBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + ===* UNCHANGED METHOD: PUBLIC SYNTHETIC (<- NON_SYNTHETIC) BRIDGE (<- NON_BRIDGE) io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer addLogRecordProcessorCustomizer(java.util.function.BiFunction(<- )) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdkBuilder addMetricReaderCustomizer(java.util.function.BiFunction) diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
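The 1.36.0 diffs above add `ProxyOptions` to `opentelemetry-sdk-common` and expose it on the OTLP/HTTP exporter builders, alongside new connect-timeout setters on the OTLP/gRPC builders (the same release also adds `addMetricReaderCustomizer` to the autoconfigure SPI). A minimal sketch using only the signatures listed above; the endpoint, proxy address, and timeout values are illustrative assumptions:

```java
import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter;
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.common.export.ProxyOptions;
import java.net.InetSocketAddress;
import java.time.Duration;

class OtlpExporterConfigExample {
  static void configure() {
    // HTTP exporters can now route through a proxy via ProxyOptions (new in 1.36.0).
    OtlpHttpMetricExporter metricExporter = OtlpHttpMetricExporter.builder()
        .setProxyOptions(ProxyOptions.create(new InetSocketAddress("proxy.example.com", 8080)))
        .build();

    // gRPC exporters gain an explicit connect timeout.
    OtlpGrpcSpanExporter spanExporter = OtlpGrpcSpanExporter.builder()
        .setEndpoint("http://localhost:4317")
        .setConnectTimeout(Duration.ofSeconds(5))
        .build();
  }
}
```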
\ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk.txt b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.36.0_vs_1.35.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-api.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-api.txt new file mode 100644 index 00000000000..815a9fed06c --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-api.txt @@ -0,0 +1,5 @@ +Comparing source compatibility of against +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.api.trace.Span (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.trace.Span addLink(io.opentelemetry.api.trace.SpanContext) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.trace.Span addLink(io.opentelemetry.api.trace.SpanContext, io.opentelemetry.api.common.Attributes) diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-context.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
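The 1.37.0 API diff above adds `Span.addLink`, so links can be attached after a span has started. A minimal sketch; the tracer name, span name, attribute key, and the linked context passed in are illustrative:

```java
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.api.trace.Tracer;

class AddLinkExample {
  static void run(SpanContext linkedContext) {
    Tracer tracer = GlobalOpenTelemetry.getTracer("example");
    Span span = tracer.spanBuilder("process-batch").startSpan();
    // Links can now be added after span creation (new in 1.37.0).
    span.addLink(linkedContext);
    span.addLink(linkedContext, Attributes.of(AttributeKey.stringKey("link.kind"), "follows-from"));
    span.end();
  }
}
```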
\ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..d8fcd994b8d --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,7 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.export.MemoryMode getMemoryMode() +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.export.MemoryMode getMemoryMode() diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..2039c1c6e68 --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,7 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.export.LogRecordExporter getLogRecordExporter() +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.export.LogRecordExporter getLogRecordExporter() diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
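The 1.37.0 sdk-logs diff above exposes the configured exporter on the batch and simple log record processors; the matching `getSpanExporter()` accessor appears in the sdk-trace diff that follows. A minimal sketch of the log side, assuming a default OTLP/gRPC exporter just for illustration:

```java
import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter;
import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor;
import io.opentelemetry.sdk.logs.export.LogRecordExporter;

class ProcessorAccessorExample {
  static void inspect() {
    BatchLogRecordProcessor processor =
        BatchLogRecordProcessor.builder(OtlpGrpcLogRecordExporter.builder().build()).build();
    // New accessor (1.37.0) exposes the exporter the processor was configured with.
    LogRecordExporter exporter = processor.getLogRecordExporter();
    System.out.println("exporting logs via " + exporter);
  }
}
```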
\ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..52678bd9ba6 --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,7 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.trace.export.BatchSpanProcessor (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.trace.export.SpanExporter getSpanExporter() +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.trace.export.SimpleSpanProcessor (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.trace.export.SpanExporter getSpanExporter() diff --git a/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk.txt b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.37.0_vs_1.36.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-api.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-api.txt new file mode 100644 index 00000000000..d050fbe201a --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-api.txt @@ -0,0 +1,19 @@ +Comparing source compatibility of against ++++ NEW INTERFACE: PUBLIC(+) ABSTRACT(+) io.opentelemetry.api.metrics.DoubleGauge (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. + +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) void set(double) + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) void set(double, io.opentelemetry.api.common.Attributes) + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) void set(double, io.opentelemetry.api.common.Attributes, io.opentelemetry.context.Context) +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.api.metrics.DoubleGaugeBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.metrics.DoubleGauge build() ++++ NEW INTERFACE: PUBLIC(+) ABSTRACT(+) io.opentelemetry.api.metrics.LongGauge (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. 
+ +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) void set(long) + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) void set(long, io.opentelemetry.api.common.Attributes) + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) void set(long, io.opentelemetry.api.common.Attributes, io.opentelemetry.context.Context) +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.api.metrics.LongGaugeBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.metrics.LongGauge build() diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-context.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
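The 1.38.0 API diff above introduces synchronous gauges: `DoubleGaugeBuilder#build()` and `LongGaugeBuilder#build()` now return `DoubleGauge` / `LongGauge` with `set(...)` overloads. A minimal sketch; the meter scope, instrument names, and attribute key are illustrative:

```java
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.DoubleGauge;
import io.opentelemetry.api.metrics.LongGauge;
import io.opentelemetry.api.metrics.Meter;

class SynchronousGaugeExample {
  static void record() {
    Meter meter = GlobalOpenTelemetry.getMeter("example");
    // build() on the gauge builders returns a synchronous instrument as of 1.38.0.
    DoubleGauge utilization = meter.gaugeBuilder("queue.utilization").build();
    utilization.set(0.42);
    utilization.set(0.42, Attributes.of(AttributeKey.stringKey("queue.name"), "orders"));

    LongGauge depth = meter.gaugeBuilder("queue.depth").ofLongs().build();
    depth.set(17);
  }
}
```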
\ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..71f05afd4d1 --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-common.txt @@ -0,0 +1,4 @@ +Comparing source compatibility of against +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.common.Clock (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) long now(boolean) diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
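The 1.38.0 sdk-common diff above adds a `Clock.now(boolean)` overload. A minimal sketch; reading the flag as a request for high-precision timestamps is an assumption, so treat the comment as illustrative:

```java
import io.opentelemetry.sdk.common.Clock;

class ClockExample {
  static void sample() {
    Clock clock = Clock.getDefault();
    long coarse = clock.now(false); // new overload in 1.38.0; flag meaning (high precision) is an assumption
    long precise = clock.now(true);
    System.out.println(coarse + " / " + precise);
  }
}
```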
\ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..ef9c86f56b8 --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,10 @@ +Comparing source compatibility of against +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) STATIC(+) java.lang.String asString(io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector) +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) STATIC(+) java.lang.String asString(io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector) +*** MODIFIED ENUM: PUBLIC FINAL io.opentelemetry.sdk.metrics.InstrumentType (compatible) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.sdk.metrics.InstrumentType GAUGE diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
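The 1.38.0 sdk-metrics diff above adds `asString` helpers for the temporality and aggregation selectors, plus an `InstrumentType.GAUGE` constant for the new synchronous gauges. A minimal sketch using the existing `deltaPreferred()` and `getDefault()` factories:

```java
import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector;
import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector;

class SelectorDebugExample {
  static void print() {
    // New helpers (1.38.0) render selector configuration for debugging/logging.
    System.out.println(AggregationTemporalitySelector.asString(AggregationTemporalitySelector.deltaPreferred()));
    System.out.println(DefaultAggregationSelector.asString(DefaultAggregationSelector.getDefault()));
    // Synchronous gauges now have their own instrument type.
    System.out.println(InstrumentType.GAUGE);
  }
}
```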
\ No newline at end of file diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..722ee3a950f --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,4 @@ +Comparing source compatibility of against +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.trace.ReadableSpan (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.common.Attributes getAttributes() diff --git a/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk.txt b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.38.0_vs_1.37.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-api.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-api.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-context.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-context.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
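The 1.38.0 sdk-trace diff above adds `ReadableSpan.getAttributes()`. A minimal sketch of a span processor that reads attributes on end; the class name and the println output are illustrative:

```java
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.trace.ReadWriteSpan;
import io.opentelemetry.sdk.trace.ReadableSpan;
import io.opentelemetry.sdk.trace.SpanProcessor;

final class AttributeLoggingProcessor implements SpanProcessor {
  @Override public void onStart(Context parentContext, ReadWriteSpan span) {}

  @Override public boolean isStartRequired() { return false; }

  @Override public void onEnd(ReadableSpan span) {
    Attributes attributes = span.getAttributes(); // new accessor in 1.38.0 per the diff above
    System.out.println(span.getName() + " attributes: " + attributes);
  }

  @Override public boolean isEndRequired() { return true; }
}
```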
\ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..35bb9f3e97e --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,19 @@ +Comparing source compatibility of against +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED METHOD: PUBLIC (<- PACKAGE_PROTECTED) io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder setMemoryMode(io.opentelemetry.sdk.common.export.MemoryMode) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED METHOD: PUBLIC (<- PACKAGE_PROTECTED) io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder setMemoryMode(io.opentelemetry.sdk.common.export.MemoryMode) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED METHOD: PUBLIC (<- PACKAGE_PROTECTED) io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder setMemoryMode(io.opentelemetry.sdk.common.export.MemoryMode) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED METHOD: PUBLIC (<- PACKAGE_PROTECTED) io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder setMemoryMode(io.opentelemetry.sdk.common.export.MemoryMode) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED METHOD: PUBLIC (<- PACKAGE_PROTECTED) io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder setMemoryMode(io.opentelemetry.sdk.common.export.MemoryMode) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED METHOD: PUBLIC (<- PACKAGE_PROTECTED) io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder setMemoryMode(io.opentelemetry.sdk.common.export.MemoryMode) diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
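The 1.39.0 diff above promotes `setMemoryMode(MemoryMode)` from package-private to public on the OTLP exporter builders; the matching `getMemoryMode()` accessors appeared in the 1.37.0 diff earlier. A minimal sketch; choosing `REUSABLE_DATA` here is illustrative:

```java
import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter;
import io.opentelemetry.sdk.common.export.MemoryMode;

class MemoryModeExample {
  static OtlpGrpcMetricExporter create() {
    return OtlpGrpcMetricExporter.builder()
        .setMemoryMode(MemoryMode.REUSABLE_DATA) // public as of 1.39.0 per the diff above
        .build();
  }
}
```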
\ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk.txt b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..df26146497b --- /dev/null +++ b/docs/apidiffs/1.39.0_vs_1.38.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of against +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-api.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-api.txt new file mode 100644 index 00000000000..ab4fa9fb021 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-api-1.40.0.jar against opentelemetry-api-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-context.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-context.txt new file mode 100644 index 00000000000..0e0c373df57 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-context-1.40.0.jar against opentelemetry-context-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..8ebecc326a1 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.40.0.jar against opentelemetry-exporter-common-1.39.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..a422f20359e --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.40.0.jar against opentelemetry-exporter-logging-otlp-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..5492a866052 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.40.0.jar against opentelemetry-exporter-logging-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..74fcc72b9de --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.40.0.jar against opentelemetry-exporter-otlp-common-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..dc0babb48fd --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.40.0.jar against opentelemetry-exporter-otlp-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..f02a4d71f87 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.40.0.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..385c556dfb6 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.40.0.jar against opentelemetry-exporter-sender-jdk-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..d325d58ea5d --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.40.0.jar against opentelemetry-exporter-sender-okhttp-1.39.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..8ee1e891c91 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.40.0.jar against opentelemetry-exporter-zipkin-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..c038bdf240a --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.40.0.jar against opentelemetry-extension-kotlin-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..45091f1a06c --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.40.0.jar against opentelemetry-extension-trace-propagators-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..06013311639 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.40.0.jar against opentelemetry-opentracing-shim-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..97c399bb7b7 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.40.0.jar against opentelemetry-sdk-common-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..d6707767630 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.40.0.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..72c89c9f34c --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.40.0.jar against opentelemetry-sdk-extension-autoconfigure-1.39.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..edf13347673 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.40.0.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..04ad97a31ae --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.40.0.jar against opentelemetry-sdk-logs-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..b4b81d47fa6 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.40.0.jar against opentelemetry-sdk-metrics-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..3eba71a9768 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.40.0.jar against opentelemetry-sdk-testing-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..38bc5b93520 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.40.0.jar against opentelemetry-sdk-trace-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk.txt b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..6eb2fc4ff19 --- /dev/null +++ b/docs/apidiffs/1.40.0_vs_1.39.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.40.0.jar against opentelemetry-sdk-1.39.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-api.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-api.txt new file mode 100644 index 00000000000..afe8b67a002 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-api-1.41.0.jar against opentelemetry-api-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-context.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-context.txt new file mode 100644 index 00000000000..02ae6fcdd0d --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-context-1.41.0.jar against opentelemetry-context-1.40.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..d7c1e865d7f --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.41.0.jar against opentelemetry-exporter-common-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..05e56d6ba4e --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.41.0.jar against opentelemetry-exporter-logging-otlp-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..8015b7b7ae0 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.41.0.jar against opentelemetry-exporter-logging-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..3c388babdcc --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.41.0.jar against opentelemetry-exporter-otlp-common-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..6858a24a84d --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.41.0.jar against opentelemetry-exporter-otlp-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..64d2a51493f --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.41.0.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..e84cf24859a --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.41.0.jar against opentelemetry-exporter-sender-jdk-1.40.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..d5e2bdb2a72 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.41.0.jar against opentelemetry-exporter-sender-okhttp-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..a6290a1aa53 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.41.0.jar against opentelemetry-exporter-zipkin-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..8f6c4acd90c --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.41.0.jar against opentelemetry-extension-kotlin-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..0be9d14cee0 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.41.0.jar against opentelemetry-extension-trace-propagators-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..68d4997cb1b --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.41.0.jar against opentelemetry-opentracing-shim-1.40.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..cf51f2f56ef --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-common.txt @@ -0,0 +1,7 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.41.0.jar against opentelemetry-sdk-common-1.40.0.jar +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.common.CompletableResultCode (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.CompletableResultCode failExceptionally(java.lang.Throwable) + +++ NEW METHOD: PUBLIC(+) java.lang.Throwable getFailureThrowable() + +++ NEW ANNOTATION: javax.annotation.Nullable + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.common.CompletableResultCode ofExceptionalFailure(java.lang.Throwable) diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..55b6f8964c7 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.41.0.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..99b5916941d --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.41.0.jar against opentelemetry-sdk-extension-autoconfigure-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..08fc54d6789 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.41.0.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..d6449ab8f5f --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.41.0.jar against opentelemetry-sdk-logs-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..714c909d030 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.41.0.jar against opentelemetry-sdk-metrics-1.40.0.jar +No changes. 
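The `opentelemetry-sdk-common` 1.41.0 diff above adds exception-aware completion to `CompletableResultCode` (`failExceptionally(Throwable)`, `getFailureThrowable()`, and the static `ofExceptionalFailure(Throwable)`). A minimal sketch of how an exporter might use these methods, assuming the signatures listed above; the export routine itself is hypothetical:

```java
import io.opentelemetry.sdk.common.CompletableResultCode;

class ResultCodeSketch {

  // Hypothetical export routine illustrating failExceptionally(Throwable).
  static CompletableResultCode export() {
    CompletableResultCode result = new CompletableResultCode();
    try {
      // ... perform the export ...
      result.succeed();
    } catch (Throwable t) {
      // Records the failed state *and* its cause (new in 1.41.0).
      result.failExceptionally(t);
    }
    return result;
  }

  public static void main(String[] args) {
    CompletableResultCode result = export();
    result.whenComplete(() -> {
      Throwable cause = result.getFailureThrowable();
      if (!result.isSuccess() && cause != null) {
        cause.printStackTrace();
      }
    });

    // An already-failed result that carries its cause:
    CompletableResultCode failed =
        CompletableResultCode.ofExceptionalFailure(new IllegalStateException("exporter shut down"));
  }
}
```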
\ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..7db61bd614f --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.41.0.jar against opentelemetry-sdk-testing-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..077d6ad7ed2 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.41.0.jar against opentelemetry-sdk-trace-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk.txt b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..b2a1b0452f8 --- /dev/null +++ b/docs/apidiffs/1.41.0_vs_1.40.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.41.0.jar against opentelemetry-sdk-1.40.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-api.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-api.txt new file mode 100644 index 00000000000..3a96056c4f1 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-api.txt @@ -0,0 +1,41 @@ +Comparing source compatibility of opentelemetry-api-1.42.0.jar against opentelemetry-api-1.41.0.jar ++++ NEW INTERFACE: PUBLIC(+) ABSTRACT(+) io.opentelemetry.api.common.KeyValue (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. + +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) java.lang.String getKey() + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) io.opentelemetry.api.common.Value getValue() + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.KeyValue of(java.lang.String, io.opentelemetry.api.common.Value) ++++ NEW INTERFACE: PUBLIC(+) ABSTRACT(+) io.opentelemetry.api.common.Value (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. 
+ GENERIC TEMPLATES: +++ T:java.lang.Object + +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) java.lang.String asString() + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) io.opentelemetry.api.common.ValueType getType() + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) java.lang.Object getValue() + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.Value of(java.lang.String) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.Value of(boolean) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.Value of(long) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.Value of(double) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.Value of(byte[]) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.Value>> of(io.opentelemetry.api.common.Value[]) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.Value>> of(java.util.List>) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.Value> of(io.opentelemetry.api.common.KeyValue[]) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.Value> of(java.util.Map>) ++++ NEW ENUM: PUBLIC(+) FINAL(+) io.opentelemetry.api.common.ValueType (compatible) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. + +++ NEW INTERFACE: java.lang.constant.Constable + +++ NEW INTERFACE: java.lang.Comparable + +++ NEW INTERFACE: java.io.Serializable + +++ NEW SUPERCLASS: java.lang.Enum + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.api.common.ValueType BYTES + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.api.common.ValueType ARRAY + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.api.common.ValueType KEY_VALUE_LIST + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.api.common.ValueType STRING + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.api.common.ValueType DOUBLE + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.api.common.ValueType BOOLEAN + +++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) io.opentelemetry.api.common.ValueType LONG + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.ValueType valueOf(java.lang.String) + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.api.common.ValueType[] values() +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.api.logs.LogRecordBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.logs.LogRecordBuilder setBody(io.opentelemetry.api.common.Value) diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-context.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-context.txt new file mode 100644 index 00000000000..4235de83b07 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-context-1.42.0.jar against opentelemetry-context-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..b9516eeee56 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.42.0.jar against opentelemetry-exporter-common-1.41.0.jar +No changes. 
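The `opentelemetry-api` 1.42.0 diff above introduces `Value`, `KeyValue`, and `ValueType`, plus `LogRecordBuilder.setBody(Value)`, enabling structured (non-string) log bodies. A minimal usage sketch, assuming the `Value.of(...)` and `KeyValue.of(...)` factories listed above (their generic type parameters appear truncated in the extracted listing) and a `Logger` obtained elsewhere:

```java
import io.opentelemetry.api.common.KeyValue;
import io.opentelemetry.api.common.Value;
import io.opentelemetry.api.logs.Logger;

class StructuredBodySketch {

  // Emits a log record whose body is a key-value list rather than a flat string.
  static void emitLogin(Logger logger) {
    Value<?> body =
        Value.of(
            KeyValue.of("event", Value.of("user.login")),
            KeyValue.of("attempts", Value.of(3L)),
            KeyValue.of("success", Value.of(true)));
    logger.logRecordBuilder()
        .setBody(body) // new in 1.42.0; previously only string bodies were supported
        .emit();
  }
}
```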
\ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..c20575d8213 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.42.0.jar against opentelemetry-exporter-logging-otlp-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..742accf915d --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.42.0.jar against opentelemetry-exporter-logging-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..65d41b6ec4e --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.42.0.jar against opentelemetry-exporter-otlp-common-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..217a061f337 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.42.0.jar against opentelemetry-exporter-otlp-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..761405711f8 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.42.0.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..c1b86871894 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.42.0.jar against opentelemetry-exporter-sender-jdk-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..8fee739b86d --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.42.0.jar against opentelemetry-exporter-sender-okhttp-1.41.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..29c5e24ef73 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.42.0.jar against opentelemetry-exporter-zipkin-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..74011673f81 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.42.0.jar against opentelemetry-extension-kotlin-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..5a5b46fc554 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.42.0.jar against opentelemetry-extension-trace-propagators-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..459a5cbb037 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.42.0.jar against opentelemetry-opentracing-shim-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..15d384cb970 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.42.0.jar against opentelemetry-sdk-common-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..507709301e4 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.42.0.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..bc405d103f5 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.42.0.jar against opentelemetry-sdk-extension-autoconfigure-1.41.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..debfbaf6acc --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.42.0.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..44d010071ed --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,15 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.42.0.jar against opentelemetry-sdk-logs-1.41.0.jar +=== UNCHANGED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.logs.data.Body (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + === UNCHANGED METHOD: PUBLIC ABSTRACT io.opentelemetry.sdk.logs.data.Body$Type getType() + +++ NEW ANNOTATION: java.lang.Deprecated + +++ NEW ANNOTATION: java.lang.Deprecated +=== UNCHANGED ENUM: PUBLIC STATIC FINAL io.opentelemetry.sdk.logs.data.Body$Type (compatible) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW ANNOTATION: java.lang.Deprecated +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.logs.data.LogRecordData (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + === UNCHANGED METHOD: PUBLIC ABSTRACT io.opentelemetry.sdk.logs.data.Body getBody() + +++ NEW ANNOTATION: java.lang.Deprecated + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.common.Value getBodyValue() + +++ NEW ANNOTATION: javax.annotation.Nullable diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..2d8dc8fe717 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.42.0.jar against opentelemetry-sdk-metrics-1.41.0.jar +No changes. 
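In the `opentelemetry-sdk-logs` 1.42.0 diff above, `Body` is deprecated and `LogRecordData.getBodyValue()` is added. A brief sketch of a log consumer (for example, a custom exporter) migrating to the new accessor; per the `@Nullable` annotation listed above, the return value may be null:

```java
import io.opentelemetry.api.common.Value;
import io.opentelemetry.sdk.logs.data.LogRecordData;

class BodyMigrationSketch {

  // Prefer the Value-based accessor over the deprecated Body-based getBody().
  static String renderBody(LogRecordData log) {
    Value<?> body = log.getBodyValue(); // new in 1.42.0, may be null
    return body == null ? "" : body.asString();
  }
}
```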
\ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..be9647f8b89 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,26 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.42.0.jar against opentelemetry-sdk-testing-1.41.0.jar +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBody(io.opentelemetry.api.common.Value) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(java.lang.String, java.lang.String) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(java.lang.String, long) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(java.lang.String, double) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(java.lang.String, boolean) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(java.lang.String, java.lang.String[]) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(java.lang.String, long[]) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(java.lang.String, double[]) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(java.lang.String, boolean[]) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(java.lang.String, io.opentelemetry.api.common.Value) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.LogRecordDataAssert hasBodyField(io.opentelemetry.api.common.AttributeKey, java.lang.Object) + GENERIC TEMPLATES: +++ T:java.lang.Object +**** MODIFIED CLASS: PUBLIC ABSTRACT io.opentelemetry.sdk.testing.logs.TestLogRecordData (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.data.Body getBody() + +++ NEW ANNOTATION: java.lang.Deprecated + +++* NEW METHOD: PUBLIC(+) ABSTRACT(+) io.opentelemetry.api.common.Value getBodyValue() + +++ NEW ANNOTATION: javax.annotation.Nullable +**** MODIFIED CLASS: PUBLIC ABSTRACT STATIC io.opentelemetry.sdk.testing.logs.TestLogRecordData$Builder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED METHOD: PUBLIC (<- PACKAGE_PROTECTED) NON_ABSTRACT (<- ABSTRACT) io.opentelemetry.sdk.testing.logs.TestLogRecordData$Builder setBody(io.opentelemetry.sdk.logs.data.Body) + +++ NEW ANNOTATION: java.lang.Deprecated + +++* NEW METHOD: PUBLIC(+) ABSTRACT(+) io.opentelemetry.sdk.testing.logs.TestLogRecordData$Builder setBodyValue(io.opentelemetry.api.common.Value) diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..6812c3e1cd1 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.42.0.jar against opentelemetry-sdk-trace-1.41.0.jar +No changes. 
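The `opentelemetry-sdk-testing` 1.42.0 diff above adds `hasBody(Value)` and the `hasBodyField(...)` overloads to `LogRecordDataAssert`. A sketch of test assertions against a map-valued body like the one emitted in the earlier `Value`/`KeyValue` example; `log` is assumed to be a captured `LogRecordData`:

```java
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;

import io.opentelemetry.sdk.logs.data.LogRecordData;

class LogBodyAssertionSketch {

  // Asserts individual fields of a structured body without matching the whole Value.
  static void verify(LogRecordData log) {
    assertThat(log)
        .hasBodyField("event", "user.login")
        .hasBodyField("attempts", 3L)
        .hasBodyField("success", true);
  }
}
```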
\ No newline at end of file diff --git a/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk.txt b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..78c1aa54f96 --- /dev/null +++ b/docs/apidiffs/1.42.0_vs_1.41.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.42.0.jar against opentelemetry-sdk-1.41.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-api.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-api.txt new file mode 100644 index 00000000000..b11bbc44363 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-api.txt @@ -0,0 +1,5 @@ +Comparing source compatibility of opentelemetry-api-1.43.0.jar against opentelemetry-api-1.42.0.jar +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.api.baggage.Baggage (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.baggage.BaggageEntry getEntry(java.lang.String) + +++ NEW ANNOTATION: javax.annotation.Nullable diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-context.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-context.txt new file mode 100644 index 00000000000..f01838a847d --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-context.txt @@ -0,0 +1,4 @@ +Comparing source compatibility of opentelemetry-context-1.43.0.jar against opentelemetry-context-1.42.0.jar +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.context.Context (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) STATIC(+) java.util.concurrent.ScheduledExecutorService taskWrapping(java.util.concurrent.ScheduledExecutorService) diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..4f61afb8eb1 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.43.0.jar against opentelemetry-exporter-common-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..2df260f443c --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.43.0.jar against opentelemetry-exporter-logging-otlp-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..486843d7ca4 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.43.0.jar against opentelemetry-exporter-logging-1.42.0.jar +No changes. 
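The 1.43.0 diffs above add `Baggage.getEntry(String)` and a `Context.taskWrapping(ScheduledExecutorService)` overload. A small sketch combining both; the executor, key, and values are illustrative:

```java
import io.opentelemetry.api.baggage.Baggage;
import io.opentelemetry.api.baggage.BaggageEntry;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

class ContextHelpersSketch {

  public static void main(String[] args) throws InterruptedException {
    // getEntry(String): fetch a single entry (value plus metadata) without iterating asMap().
    Baggage baggage = Baggage.builder().put("tenant", "acme").build();
    BaggageEntry entry = baggage.getEntry("tenant"); // may be null if the key is absent
    if (entry != null) {
      System.out.println(entry.getValue());
    }

    // taskWrapping(ScheduledExecutorService): scheduled tasks run with the context that was
    // current at submission time (previously only a plain ExecutorService overload existed).
    ScheduledExecutorService scheduler =
        Context.taskWrapping(Executors.newSingleThreadScheduledExecutor());
    try (Scope ignored = Context.current().with(baggage).makeCurrent()) {
      scheduler.schedule(
          () -> System.out.println(Baggage.current().getEntryValue("tenant")),
          100, TimeUnit.MILLISECONDS);
    }
    scheduler.shutdown();
    scheduler.awaitTermination(1, TimeUnit.SECONDS);
  }
}
```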
\ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..67ed4e71728 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.43.0.jar against opentelemetry-exporter-otlp-common-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..4c3afc0f107 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.43.0.jar against opentelemetry-exporter-otlp-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..f9e1ee6e790 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.43.0.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..6c717fd3eec --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.43.0.jar against opentelemetry-exporter-sender-jdk-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..3517b983a81 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.43.0.jar against opentelemetry-exporter-sender-okhttp-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..bad70283ecf --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.43.0.jar against opentelemetry-exporter-zipkin-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..a734b74463d --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.43.0.jar against opentelemetry-extension-kotlin-1.42.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..66ba7fd51af --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.43.0.jar against opentelemetry-extension-trace-propagators-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..f2bd7607ce4 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.43.0.jar against opentelemetry-opentracing-shim-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..c6c737f6f6f --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.43.0.jar against opentelemetry-sdk-common-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..7e2e0155064 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.43.0.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..3d38546b923 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.43.0.jar against opentelemetry-sdk-extension-autoconfigure-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..b58ac546dc1 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.43.0.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..70fcf9a7e92 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.43.0.jar against opentelemetry-sdk-logs-1.42.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..a6a67209348 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.43.0.jar against opentelemetry-sdk-metrics-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..1b0720243f5 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.43.0.jar against opentelemetry-sdk-testing-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..dc14e4e2560 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.43.0.jar against opentelemetry-sdk-trace-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk.txt b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..5530c784a30 --- /dev/null +++ b/docs/apidiffs/1.43.0_vs_1.42.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.43.0.jar against opentelemetry-sdk-1.42.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-api.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-api.txt new file mode 100644 index 00000000000..9e70186c29f --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-api-1.44.0.jar against opentelemetry-api-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-context.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-context.txt new file mode 100644 index 00000000000..a0b2417ea3d --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-context-1.44.0.jar against opentelemetry-context-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..c33ced08682 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.44.0.jar against opentelemetry-exporter-common-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..3384b7a6a8f --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.44.0.jar against opentelemetry-exporter-logging-otlp-1.43.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..e0995445a2d --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.44.0.jar against opentelemetry-exporter-logging-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..d5f44a6ee30 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.44.0.jar against opentelemetry-exporter-otlp-common-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..6b9b06b34dc --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.44.0.jar against opentelemetry-exporter-otlp-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..da2b18615cf --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.44.0.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..fcd6cdf22e2 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.44.0.jar against opentelemetry-exporter-sender-jdk-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..884e240ae22 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.44.0.jar against opentelemetry-exporter-sender-okhttp-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..b2241eeb636 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.44.0.jar against opentelemetry-exporter-zipkin-1.43.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..8044b8f8eec --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.44.0.jar against opentelemetry-extension-kotlin-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..da2fff0fb55 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.44.0.jar against opentelemetry-extension-trace-propagators-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..5a3e21de838 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.44.0.jar against opentelemetry-opentracing-shim-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..5ae9baeb962 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.44.0.jar against opentelemetry-sdk-common-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..2a9f6dfbf2b --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.44.0.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..227dd95136c --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.44.0.jar against opentelemetry-sdk-extension-autoconfigure-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..a35f25ac548 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.44.0.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.43.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..8605a4468ed --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.44.0.jar against opentelemetry-sdk-logs-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..985d2734842 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,16 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.44.0.jar against opentelemetry-sdk-metrics-1.43.0.jar ++++ NEW INTERFACE: PUBLIC(+) ABSTRACT(+) io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. + +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector defaultCardinalityLimitSelector() + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) int getCardinalityLimit(io.opentelemetry.sdk.metrics.InstrumentType) + +++ NEW ANNOTATION: java.lang.FunctionalInterface +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder registerMetricReader(io.opentelemetry.sdk.metrics.export.MetricReader, io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector) +*** MODIFIED CLASS: PUBLIC ABSTRACT io.opentelemetry.sdk.metrics.View (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED METHOD: PUBLIC (<- PACKAGE_PROTECTED) ABSTRACT int getCardinalityLimit() +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.metrics.ViewBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + *** MODIFIED METHOD: PUBLIC (<- PACKAGE_PROTECTED) io.opentelemetry.sdk.metrics.ViewBuilder setCardinalityLimit(int) diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..8042c5e1b2d --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.44.0.jar against opentelemetry-sdk-testing-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..452716a43f3 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,7 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.44.0.jar against opentelemetry-sdk-trace-1.43.0.jar ++++ NEW INTERFACE: PUBLIC(+) ABSTRACT(+) io.opentelemetry.sdk.trace.data.ExceptionEventData (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. 
+ +++ NEW INTERFACE: io.opentelemetry.sdk.trace.data.EventData + +++ NEW SUPERCLASS: java.lang.Object + +++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.trace.data.ExceptionEventData create(long, java.lang.Throwable, io.opentelemetry.api.common.Attributes, int) + +++ NEW METHOD: PUBLIC(+) ABSTRACT(+) java.lang.Throwable getException() diff --git a/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk.txt b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..333f8c4f5b0 --- /dev/null +++ b/docs/apidiffs/1.44.0_vs_1.43.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.44.0.jar against opentelemetry-sdk-1.43.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-api.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-api.txt new file mode 100644 index 00000000000..227cbe9a7d7 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-api-1.44.1.jar against opentelemetry-api-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-context.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-context.txt new file mode 100644 index 00000000000..1ba6fac89b4 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-context-1.44.1.jar against opentelemetry-context-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..f17cdd1ef90 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.44.1.jar against opentelemetry-exporter-common-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..8b093eaf0aa --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.44.1.jar against opentelemetry-exporter-logging-otlp-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..c60e2be2842 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.44.1.jar against opentelemetry-exporter-logging-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..75bc9a388e8 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.44.1.jar against opentelemetry-exporter-otlp-common-1.44.0.jar +No changes. 
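The 1.44.0 diffs above (see the `opentelemetry-sdk-metrics` and `opentelemetry-sdk-trace` entries) expose `CardinalityLimitSelector`, the `registerMetricReader(MetricReader, CardinalityLimitSelector)` overload, `ViewBuilder.setCardinalityLimit(int)`, and the `ExceptionEventData` interface. A sketch of both additions; the limits and the reader are placeholders:

```java
import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector;
import io.opentelemetry.sdk.metrics.export.MetricReader;
import io.opentelemetry.sdk.trace.data.ExceptionEventData;
import io.opentelemetry.sdk.trace.data.SpanData;

import java.util.List;
import java.util.stream.Collectors;

class Sdk144Sketch {

  // Per-reader cardinality limits: cap the distinct attribute sets kept per instrument type.
  static SdkMeterProvider meterProvider(MetricReader reader) {
    CardinalityLimitSelector selector =
        instrumentType -> instrumentType == InstrumentType.HISTOGRAM ? 500 : 2000;
    return SdkMeterProvider.builder()
        .registerMetricReader(reader, selector) // new overload in 1.44.0
        .build();
  }

  // ExceptionEventData is now part of the public API: pull recorded exceptions out of a span.
  static List<Throwable> recordedExceptions(SpanData span) {
    return span.getEvents().stream()
        .filter(ExceptionEventData.class::isInstance)
        .map(event -> ((ExceptionEventData) event).getException())
        .collect(Collectors.toList());
  }
}
```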
\ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..c6b8d754b79 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.44.1.jar against opentelemetry-exporter-otlp-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..50b886238aa --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.44.1.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..33c42b914c4 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.44.1.jar against opentelemetry-exporter-sender-jdk-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..9f2ab4ddafb --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.44.1.jar against opentelemetry-exporter-sender-okhttp-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..29340d90453 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.44.1.jar against opentelemetry-exporter-zipkin-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..467a926a409 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.44.1.jar against opentelemetry-extension-kotlin-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..bfcd1605834 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.44.1.jar against opentelemetry-extension-trace-propagators-1.44.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..c553192aca9 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.44.1.jar against opentelemetry-opentracing-shim-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..d07f1dadeb8 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.44.1.jar against opentelemetry-sdk-common-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..37aa938da9b --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.44.1.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..750b0e2c27f --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.44.1.jar against opentelemetry-sdk-extension-autoconfigure-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..0643d17dfbb --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.44.1.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..26b4f4b3e5f --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.44.1.jar against opentelemetry-sdk-logs-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..5031b75f9f6 --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.44.1.jar against opentelemetry-sdk-metrics-1.44.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..b3c59ff77fd --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.44.1.jar against opentelemetry-sdk-testing-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..2feb43d5d6b --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.44.1.jar against opentelemetry-sdk-trace-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk.txt b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..37868ac312f --- /dev/null +++ b/docs/apidiffs/1.44.1_vs_1.44.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.44.1.jar against opentelemetry-sdk-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-api.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-api.txt new file mode 100644 index 00000000000..bf2bd952ea2 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-api.txt @@ -0,0 +1,4 @@ +Comparing source compatibility of opentelemetry-api-1.45.0.jar against opentelemetry-api-1.44.0.jar +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.api.trace.SpanBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.trace.SpanBuilder setAttribute(io.opentelemetry.api.common.AttributeKey, int) diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-context.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-context.txt new file mode 100644 index 00000000000..2b2bd88aabf --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-context-1.45.0.jar against opentelemetry-context-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..d17a671090b --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.45.0.jar against opentelemetry-exporter-common-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..eacd4d7b06d --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.45.0.jar against opentelemetry-exporter-logging-otlp-1.44.0.jar +No changes. 
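As a minimal sketch of the `SpanBuilder.setAttribute(AttributeKey<Long>, int)` overload recorded in the 1.45.0 vs 1.44.0 `opentelemetry-api` diff above (the instrumentation scope name, attribute key, and values are illustrative, not taken from this repository):

```java
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.Tracer;

public final class SpanIntAttributeExample {
  // Illustrative key; any long-valued attribute key works with the int overload.
  private static final AttributeKey<Long> RETRY_COUNT = AttributeKey.longKey("retry.count");

  public static void main(String[] args) {
    Tracer tracer = GlobalOpenTelemetry.getTracer("example-scope");
    // The int overload added in 1.45.0 widens the value to long, so callers can pass
    // an int without an explicit cast.
    Span span = tracer.spanBuilder("checkout").setAttribute(RETRY_COUNT, 3).startSpan();
    span.end();
  }
}
```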
\ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..9a73557f2c7 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.45.0.jar against opentelemetry-exporter-logging-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..26804e02640 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.45.0.jar against opentelemetry-exporter-otlp-common-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..22aba4564a1 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.45.0.jar against opentelemetry-exporter-otlp-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..198cbb3e26a --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.45.0.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..4dc968493c8 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.45.0.jar against opentelemetry-exporter-sender-jdk-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..6c13a1dedff --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.45.0.jar against opentelemetry-exporter-sender-okhttp-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..fd6e10c1e2c --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.45.0.jar against opentelemetry-exporter-zipkin-1.44.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..ab527cf1319 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.45.0.jar against opentelemetry-extension-kotlin-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..03fda39decf --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.45.0.jar against opentelemetry-extension-trace-propagators-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..2b7bfb4bd1c --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.45.0.jar against opentelemetry-opentracing-shim-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..50d8efb7a82 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.45.0.jar against opentelemetry-sdk-common-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..39174b4c385 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.45.0.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..fe749f6f68c --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.45.0.jar against opentelemetry-sdk-extension-autoconfigure-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..c48ad9ea5c8 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.45.0.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.44.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..b037e09c771 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.45.0.jar against opentelemetry-sdk-logs-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..80ba32106db --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.45.0.jar against opentelemetry-sdk-metrics-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..257565b8a2b --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.45.0.jar against opentelemetry-sdk-testing-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..6f3a3c4eb05 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.45.0.jar against opentelemetry-sdk-trace-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk.txt b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..cb527f8bc36 --- /dev/null +++ b/docs/apidiffs/1.45.0_vs_1.44.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.45.0.jar against opentelemetry-sdk-1.44.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-api.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-api.txt new file mode 100644 index 00000000000..7a52ca19294 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-api-1.46.0.jar against opentelemetry-api-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-context.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-context.txt new file mode 100644 index 00000000000..e92b7d7e324 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-context-1.46.0.jar against opentelemetry-context-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..81783a03446 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.46.0.jar against opentelemetry-exporter-common-1.45.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..df84177a6df --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.46.0.jar against opentelemetry-exporter-logging-otlp-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..58252a79e9f --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.46.0.jar against opentelemetry-exporter-logging-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..42f4bf2f0b7 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.46.0.jar against opentelemetry-exporter-otlp-common-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..91523f25014 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.46.0.jar against opentelemetry-exporter-otlp-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..18e90db15ab --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.46.0.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..02b60f67e05 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.46.0.jar against opentelemetry-exporter-sender-jdk-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..6b276e92b7a --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.46.0.jar against opentelemetry-exporter-sender-okhttp-1.45.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..5ae6fbf884c --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.46.0.jar against opentelemetry-exporter-zipkin-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..b19719fe068 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.46.0.jar against opentelemetry-extension-kotlin-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..45e93a6e2f2 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.46.0.jar against opentelemetry-extension-trace-propagators-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..69f685420e4 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.46.0.jar against opentelemetry-opentracing-shim-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..c9019153006 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.46.0.jar against opentelemetry-sdk-common-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..9afe3d165d4 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.46.0.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..ba7aa6c29f7 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.46.0.jar against opentelemetry-sdk-extension-autoconfigure-1.45.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..ca4395b27eb --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.46.0.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..20a7acf6688 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,16 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.46.0.jar against opentelemetry-sdk-logs-1.45.0.jar +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.sdk.logs.ReadWriteLogRecord (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) java.lang.Object getAttribute(io.opentelemetry.api.common.AttributeKey) + +++ NEW ANNOTATION: javax.annotation.Nullable + GENERIC TEMPLATES: +++ T:java.lang.Object + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.common.Attributes getAttributes() + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.common.Value getBodyValue() + +++ NEW ANNOTATION: javax.annotation.Nullable + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.InstrumentationScopeInfo getInstrumentationScopeInfo() + +++ NEW METHOD: PUBLIC(+) long getObservedTimestampEpochNanos() + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.logs.Severity getSeverity() + +++ NEW METHOD: PUBLIC(+) java.lang.String getSeverityText() + +++ NEW ANNOTATION: javax.annotation.Nullable + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.trace.SpanContext getSpanContext() + +++ NEW METHOD: PUBLIC(+) long getTimestampEpochNanos() diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..471c8ad65a8 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.46.0.jar against opentelemetry-sdk-metrics-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..549c10ad82a --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.46.0.jar against opentelemetry-sdk-testing-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..f90c6b4f672 --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.46.0.jar against opentelemetry-sdk-trace-1.45.0.jar +No changes. 
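The `ReadWriteLogRecord` accessors recorded in the 1.46.0 vs 1.45.0 `opentelemetry-sdk-logs` diff above make the record readable from a `LogRecordProcessor`, not only writable. A hedged sketch of how a processor might use them (the processor name and attribute key are illustrative):

```java
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.logs.Severity;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.logs.LogRecordProcessor;
import io.opentelemetry.sdk.logs.ReadWriteLogRecord;

/** Tags error-level records using the read accessors added to ReadWriteLogRecord in 1.46.0. */
public final class ErrorTaggingLogRecordProcessor implements LogRecordProcessor {
  private static final AttributeKey<Boolean> IS_ERROR = AttributeKey.booleanKey("log.is_error");

  @Override
  public void onEmit(Context context, ReadWriteLogRecord logRecord) {
    // Before 1.46.0 the record could only be mutated here; now its fields can also be inspected.
    Severity severity = logRecord.getSeverity();
    if (severity.getSeverityNumber() >= Severity.ERROR.getSeverityNumber()) {
      logRecord.setAttribute(IS_ERROR, true);
    }
  }
}
```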
\ No newline at end of file diff --git a/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk.txt b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..e405062aabf --- /dev/null +++ b/docs/apidiffs/1.46.0_vs_1.45.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.46.0.jar against opentelemetry-sdk-1.45.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-api.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-api.txt new file mode 100644 index 00000000000..9b8cec50e3d --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-api.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-api-1.47.0.jar against opentelemetry-api-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-context.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-context.txt new file mode 100644 index 00000000000..9e926cbce02 --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-context-1.47.0.jar against opentelemetry-context-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..e9ad6c262dd --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.47.0.jar against opentelemetry-exporter-common-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..19e9e7eee9e --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.47.0.jar against opentelemetry-exporter-logging-otlp-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..f1b1755c49b --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.47.0.jar against opentelemetry-exporter-logging-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..c7ba2d7604b --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.47.0.jar against opentelemetry-exporter-otlp-common-1.46.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..c05d0d2101e --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.47.0.jar against opentelemetry-exporter-otlp-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..8df2f05993e --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.47.0.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..9a32d2426b1 --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.47.0.jar against opentelemetry-exporter-sender-jdk-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..c1e4ef30e1e --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.47.0.jar against opentelemetry-exporter-sender-okhttp-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..5728b1c7ebe --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.47.0.jar against opentelemetry-exporter-zipkin-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..d3bb74e7598 --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.47.0.jar against opentelemetry-extension-kotlin-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..dd65f61d3ab --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.47.0.jar against opentelemetry-extension-trace-propagators-1.46.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..a95c9ed9422 --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.47.0.jar against opentelemetry-opentracing-shim-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..490ef238eff --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-common.txt @@ -0,0 +1,8 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.47.0.jar against opentelemetry-sdk-common-1.46.0.jar +**** MODIFIED CLASS: PUBLIC ABSTRACT io.opentelemetry.sdk.common.export.RetryPolicy (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++* NEW METHOD: PUBLIC(+) ABSTRACT(+) java.util.function.Predicate getRetryExceptionPredicate() + +++ NEW ANNOTATION: javax.annotation.Nullable +**** MODIFIED CLASS: PUBLIC ABSTRACT STATIC io.opentelemetry.sdk.common.export.RetryPolicy$RetryPolicyBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++* NEW METHOD: PUBLIC(+) ABSTRACT(+) io.opentelemetry.sdk.common.export.RetryPolicy$RetryPolicyBuilder setRetryExceptionPredicate(java.util.function.Predicate) diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..a6c5cb9acba --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.47.0.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..e7008c43b4f --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,7 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.47.0.jar against opentelemetry-sdk-extension-autoconfigure-1.46.0.jar ++++ NEW CLASS: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.autoconfigure.EnvironmentResourceProvider (not serializable) + +++ CLASS FILE FORMAT VERSION: 52.0 <- n.a. + +++ NEW SUPERCLASS: java.lang.Object + +++ NEW CONSTRUCTOR: PUBLIC(+) EnvironmentResourceProvider() + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.resources.Resource createResource(io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties) + +++ NEW METHOD: PUBLIC(+) int order() diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..1823240ad66 --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.47.0.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.46.0.jar +No changes. 
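For the `RetryPolicy` exception-predicate hooks recorded in the 1.47.0 vs 1.46.0 `opentelemetry-sdk-common` diff above, a minimal sketch of configuring an exporter to retry only on socket timeouts. The exporter choice and attempt count are illustrative, and since the predicate's type parameter is erased in the diff, the lambda relies only on `instanceof`:

```java
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.common.export.RetryPolicy;
import java.net.SocketTimeoutException;

public final class RetryPredicateExample {
  public static void main(String[] args) {
    // Retry exporting at most 5 times, but only when the failure was a socket timeout.
    RetryPolicy retryPolicy =
        RetryPolicy.builder()
            .setMaxAttempts(5)
            .setRetryExceptionPredicate(e -> e instanceof SocketTimeoutException)
            .build();

    OtlpGrpcSpanExporter exporter =
        OtlpGrpcSpanExporter.builder().setRetryPolicy(retryPolicy).build();
    exporter.close();
  }
}
```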
\ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..946e9a23b19 --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.47.0.jar against opentelemetry-sdk-logs-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..5a524d47f40 --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.47.0.jar against opentelemetry-sdk-metrics-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..37ea274d77d --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.47.0.jar against opentelemetry-sdk-testing-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..5a9f54c51ca --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.47.0.jar against opentelemetry-sdk-trace-1.46.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk.txt b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..7e212768780 --- /dev/null +++ b/docs/apidiffs/1.47.0_vs_1.46.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.47.0.jar against opentelemetry-sdk-1.46.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-api.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-api.txt new file mode 100644 index 00000000000..de788f13512 --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-api.txt @@ -0,0 +1,8 @@ +Comparing source compatibility of opentelemetry-api-1.48.0.jar against opentelemetry-api-1.47.0.jar +*** MODIFIED INTERFACE: PUBLIC ABSTRACT io.opentelemetry.api.logs.LogRecordBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.logs.LogRecordBuilder setAttribute(java.lang.String, java.lang.String) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.logs.LogRecordBuilder setAttribute(java.lang.String, long) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.logs.LogRecordBuilder setAttribute(java.lang.String, double) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.logs.LogRecordBuilder setAttribute(java.lang.String, boolean) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.api.logs.LogRecordBuilder setAttribute(java.lang.String, int) diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-context.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-context.txt new file mode 100644 index 00000000000..3b24635de3f --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-context.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-context-1.48.0.jar against opentelemetry-context-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-common.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-common.txt new file mode 100644 index 00000000000..41b6de6cda4 --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-common-1.48.0.jar against opentelemetry-exporter-common-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-logging-otlp.txt new file mode 100644 index 00000000000..11d31d04e77 --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-logging-otlp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.48.0.jar against opentelemetry-exporter-logging-otlp-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-logging.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-logging.txt new file mode 100644 index 00000000000..a0fdd19735b --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-logging.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-logging-1.48.0.jar against opentelemetry-exporter-logging-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-otlp-common.txt new file mode 100644 index 00000000000..c29eb51623d --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-otlp-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.48.0.jar against opentelemetry-exporter-otlp-common-1.47.0.jar +No changes. 
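A short sketch of the `String`-keyed `LogRecordBuilder.setAttribute` overloads recorded in the 1.48.0 vs 1.47.0 `opentelemetry-api` diff above (logger name, body, and attribute values are illustrative):

```java
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.logs.Logger;
import io.opentelemetry.api.logs.Severity;

public final class LogAttributeOverloadsExample {
  public static void main(String[] args) {
    Logger logger = GlobalOpenTelemetry.get().getLogsBridge().get("example-scope");
    // The 1.48.0 overloads accept plain String keys, so no AttributeKey constants are needed
    // for one-off attributes.
    logger
        .logRecordBuilder()
        .setSeverity(Severity.INFO)
        .setBody("order processed")
        .setAttribute("order.id", "o-123")
        .setAttribute("order.items", 4)
        .setAttribute("order.total", 19.99)
        .setAttribute("order.expedited", false)
        .emit();
  }
}
```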
\ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-otlp.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-otlp.txt new file mode 100644 index 00000000000..f5dd0481cd3 --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-otlp.txt @@ -0,0 +1,19 @@ +Comparing source compatibility of opentelemetry-exporter-otlp-1.48.0.jar against opentelemetry-exporter-otlp-1.47.0.jar +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder setServiceClassLoader(java.lang.ClassLoader) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder setServiceClassLoader(java.lang.ClassLoader) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder setServiceClassLoader(java.lang.ClassLoader) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder setServiceClassLoader(java.lang.ClassLoader) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder setServiceClassLoader(java.lang.ClassLoader) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder setServiceClassLoader(java.lang.ClassLoader) diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-sender-grpc-managed-channel.txt new file mode 100644 index 00000000000..e22d383387d --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.48.0.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..6bdb9162c26 --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.48.0.jar against opentelemetry-exporter-sender-jdk-1.47.0.jar +No changes. 
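For the `setServiceClassLoader(ClassLoader)` builder methods recorded in the 1.48.0 vs 1.47.0 `opentelemetry-exporter-otlp` diff above, a hedged sketch; the stated purpose (directing the exporter's `ServiceLoader` lookups, such as the sender implementation, at a specific class loader) is an inference from the method name rather than something confirmed by this diff:

```java
import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter;

public final class ServiceClassLoaderExample {
  public static void main(String[] args) {
    // Assumption: in shaded or plugin-style deployments the sender implementation may live in a
    // class loader other than the thread context class loader, hence the explicit override.
    OtlpHttpSpanExporter exporter =
        OtlpHttpSpanExporter.builder()
            .setServiceClassLoader(ServiceClassLoaderExample.class.getClassLoader())
            .setEndpoint("http://localhost:4318/v1/traces")
            .build();
    exporter.close();
  }
}
```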
\ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-sender-okhttp.txt new file mode 100644 index 00000000000..7b009644b8e --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-sender-okhttp.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.48.0.jar against opentelemetry-exporter-sender-okhttp-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-zipkin.txt new file mode 100644 index 00000000000..74111836a9c --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-exporter-zipkin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-zipkin-1.48.0.jar against opentelemetry-exporter-zipkin-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-extension-kotlin.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-extension-kotlin.txt new file mode 100644 index 00000000000..ed295a0cfbe --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-extension-kotlin.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-kotlin-1.48.0.jar against opentelemetry-extension-kotlin-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-extension-trace-propagators.txt new file mode 100644 index 00000000000..adf485b5aea --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-extension-trace-propagators.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.48.0.jar against opentelemetry-extension-trace-propagators-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-opentracing-shim.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-opentracing-shim.txt new file mode 100644 index 00000000000..ece95e4201f --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-opentracing-shim.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-opentracing-shim-1.48.0.jar against opentelemetry-opentracing-shim-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-common.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-common.txt new file mode 100644 index 00000000000..6e9a0615356 --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-common.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-common-1.48.0.jar against opentelemetry-sdk-common-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-extension-autoconfigure-spi.txt new file mode 100644 index 00000000000..bc568f86e98 --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.48.0.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.47.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-extension-autoconfigure.txt new file mode 100644 index 00000000000..ed248b7e6a9 --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-extension-autoconfigure.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.48.0.jar against opentelemetry-sdk-extension-autoconfigure-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt new file mode 100644 index 00000000000..98b608173eb --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.48.0.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-logs.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-logs.txt new file mode 100644 index 00000000000..db0e655c47c --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-logs.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-logs-1.48.0.jar against opentelemetry-sdk-logs-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-metrics.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-metrics.txt new file mode 100644 index 00000000000..83956eff6a5 --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-metrics.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-metrics-1.48.0.jar against opentelemetry-sdk-metrics-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-testing.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-testing.txt new file mode 100644 index 00000000000..111061e57cf --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-testing.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-testing-1.48.0.jar against opentelemetry-sdk-testing-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-trace.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-trace.txt new file mode 100644 index 00000000000..d02ca48d06a --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk-trace.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-trace-1.48.0.jar against opentelemetry-sdk-trace-1.47.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk.txt b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk.txt new file mode 100644 index 00000000000..d5907c73bae --- /dev/null +++ b/docs/apidiffs/1.48.0_vs_1.47.0/opentelemetry-sdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-sdk-1.48.0.jar against opentelemetry-sdk-1.47.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-api.txt b/docs/apidiffs/current_vs_latest/opentelemetry-api.txt index df26146497b..d7c6fb68622 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-api.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-api.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-api-1.49.0-SNAPSHOT.jar against opentelemetry-api-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-context.txt b/docs/apidiffs/current_vs_latest/opentelemetry-context.txt index df26146497b..3b637cee8a4 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-context.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-context.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-context-1.49.0-SNAPSHOT.jar against opentelemetry-context-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-common.txt b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-common.txt index df26146497b..570550ff81f 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-common.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-common.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-exporter-common-1.49.0-SNAPSHOT.jar against opentelemetry-exporter-common-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-logging-otlp.txt b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-logging-otlp.txt index df26146497b..5f08806fef4 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-logging-otlp.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-logging-otlp.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-exporter-logging-otlp-1.49.0-SNAPSHOT.jar against opentelemetry-exporter-logging-otlp-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-logging.txt b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-logging.txt index ec040faaf8b..9ab45b7a17f 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-logging.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-logging.txt @@ -1,10 +1,2 @@ -Comparing source compatibility of against -*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.logging.LoggingMetricExporter (not serializable) - === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 - +++ NEW METHOD: PUBLIC(+) java.lang.String toString() -*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.logging.LoggingSpanExporter (not serializable) - === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 - +++ NEW METHOD: PUBLIC(+) java.lang.String toString() -*** MODIFIED CLASS: PUBLIC io.opentelemetry.exporter.logging.SystemOutLogRecordExporter (not serializable) - === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 - +++ NEW METHOD: PUBLIC(+) java.lang.String toString() +Comparing source compatibility of opentelemetry-exporter-logging-1.49.0-SNAPSHOT.jar against opentelemetry-exporter-logging-1.48.0.jar +No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-otlp-common.txt b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-otlp-common.txt index df26146497b..c02b046db32 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-otlp-common.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-otlp-common.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-exporter-otlp-common-1.49.0-SNAPSHOT.jar against opentelemetry-exporter-otlp-common-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-otlp.txt b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-otlp.txt index df26146497b..018e2e3884d 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-otlp.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-otlp.txt @@ -1,2 +1,19 @@ -Comparing source compatibility of against -No changes. \ No newline at end of file +Comparing source compatibility of opentelemetry-exporter-otlp-1.49.0-SNAPSHOT.jar against opentelemetry-exporter-otlp-1.48.0.jar +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder setExecutorService(java.util.concurrent.ExecutorService) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder setExecutorService(java.util.concurrent.ExecutorService) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder setExecutorService(java.util.concurrent.ExecutorService) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder setExecutorService(java.util.concurrent.ExecutorService) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder setExecutorService(java.util.concurrent.ExecutorService) +*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder (not serializable) + === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder setExecutorService(java.util.concurrent.ExecutorService) diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-grpc-managed-channel.txt b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-grpc-managed-channel.txt index df26146497b..9c3f334ad4d 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-grpc-managed-channel.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-grpc-managed-channel.txt @@ -1,2 +1,2 @@ 
-Comparing source compatibility of against +Comparing source compatibility of opentelemetry-exporter-sender-grpc-managed-channel-1.49.0-SNAPSHOT.jar against opentelemetry-exporter-sender-grpc-managed-channel-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-jdk.txt b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-jdk.txt new file mode 100644 index 00000000000..afe9b8526d5 --- /dev/null +++ b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-jdk.txt @@ -0,0 +1,2 @@ +Comparing source compatibility of opentelemetry-exporter-sender-jdk-1.49.0-SNAPSHOT.jar against opentelemetry-exporter-sender-jdk-1.48.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-okhttp.txt b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-okhttp.txt index df26146497b..0815487f216 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-okhttp.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-sender-okhttp.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-exporter-sender-okhttp-1.49.0-SNAPSHOT.jar against opentelemetry-exporter-sender-okhttp-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-zipkin.txt b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-zipkin.txt index 772e014e5a0..6b359d37696 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-exporter-zipkin.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-exporter-zipkin.txt @@ -1,4 +1,2 @@ -Comparing source compatibility of against -*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.exporter.zipkin.ZipkinSpanExporter (not serializable) - === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 - +++ NEW METHOD: PUBLIC(+) java.lang.String toString() +Comparing source compatibility of opentelemetry-exporter-zipkin-1.49.0-SNAPSHOT.jar against opentelemetry-exporter-zipkin-1.48.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-extension-kotlin.txt b/docs/apidiffs/current_vs_latest/opentelemetry-extension-kotlin.txt index df26146497b..7e059b2f568 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-extension-kotlin.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-extension-kotlin.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-extension-kotlin-1.49.0-SNAPSHOT.jar against opentelemetry-extension-kotlin-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-extension-trace-propagators.txt b/docs/apidiffs/current_vs_latest/opentelemetry-extension-trace-propagators.txt index df26146497b..09c848c2104 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-extension-trace-propagators.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-extension-trace-propagators.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-extension-trace-propagators-1.49.0-SNAPSHOT.jar against opentelemetry-extension-trace-propagators-1.48.0.jar No changes. 
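The `setExecutorService(ExecutorService)` additions in the `current_vs_latest` `opentelemetry-exporter-otlp` diff above are compared against a 1.49.0-SNAPSHOT jar, i.e. unreleased and subject to change. A hedged sketch under that caveat (the pool size and exporter choice are illustrative, and it is assumed the supplied executor is used for export work):

```java
import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public final class ExporterExecutorExample {
  public static void main(String[] args) {
    ExecutorService exportExecutor = Executors.newFixedThreadPool(2);
    // Unreleased 1.49.0-SNAPSHOT API: hand the exporter an application-managed thread pool
    // instead of letting it create its own.
    OtlpHttpMetricExporter exporter =
        OtlpHttpMetricExporter.builder().setExecutorService(exportExecutor).build();
    exporter.close();
    exportExecutor.shutdown();
  }
}
```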
\ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-opentracing-shim.txt b/docs/apidiffs/current_vs_latest/opentelemetry-opentracing-shim.txt index df26146497b..0c209995480 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-opentracing-shim.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-opentracing-shim.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-opentracing-shim-1.49.0-SNAPSHOT.jar against opentelemetry-opentracing-shim-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-common.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-common.txt index df26146497b..87bf61b8638 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-common.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-common.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-sdk-common-1.49.0-SNAPSHOT.jar against opentelemetry-sdk-common-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-autoconfigure-spi.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-autoconfigure-spi.txt index df26146497b..9d3fc17fdaa 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-autoconfigure-spi.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-autoconfigure-spi.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-spi-1.49.0-SNAPSHOT.jar against opentelemetry-sdk-extension-autoconfigure-spi-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-autoconfigure.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-autoconfigure.txt index df26146497b..d8f56fdc42b 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-autoconfigure.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-autoconfigure.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-sdk-extension-autoconfigure-1.49.0-SNAPSHOT.jar against opentelemetry-sdk-extension-autoconfigure-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-jaeger-remote-sampler.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-jaeger-remote-sampler.txt index df26146497b..c7caaf55da2 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-jaeger-remote-sampler.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-extension-jaeger-remote-sampler.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-sdk-extension-jaeger-remote-sampler-1.49.0-SNAPSHOT.jar against opentelemetry-sdk-extension-jaeger-remote-sampler-1.48.0.jar No changes. 
\ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-logs.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-logs.txt index df26146497b..401256abc53 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-logs.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-logs.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-sdk-logs-1.49.0-SNAPSHOT.jar against opentelemetry-sdk-logs-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-metrics.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-metrics.txt index dff1c969ed8..d3dba97c51d 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-metrics.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-metrics.txt @@ -1,4 +1,2 @@ -Comparing source compatibility of against -*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.metrics.ViewBuilder (not serializable) - === CLASS FILE FORMAT VERSION: 52.0 <- 52.0 - +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.metrics.ViewBuilder setAttributeFilter(java.util.Set) +Comparing source compatibility of opentelemetry-sdk-metrics-1.49.0-SNAPSHOT.jar against opentelemetry-sdk-metrics-1.48.0.jar +No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-testing.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-testing.txt index df26146497b..2d1a38296f1 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-testing.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-testing.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-sdk-testing-1.49.0-SNAPSHOT.jar against opentelemetry-sdk-testing-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-trace.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-trace.txt index df26146497b..b5c1d3c6dad 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-trace.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-trace.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-sdk-trace-1.49.0-SNAPSHOT.jar against opentelemetry-sdk-trace-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk.txt index df26146497b..d724acf4872 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk.txt @@ -1,2 +1,2 @@ -Comparing source compatibility of against +Comparing source compatibility of opentelemetry-sdk-1.49.0-SNAPSHOT.jar against opentelemetry-sdk-1.48.0.jar No changes. \ No newline at end of file diff --git a/docs/jmh.md b/docs/jmh.md index 698e0174419..6a399072c18 100644 --- a/docs/jmh.md +++ b/docs/jmh.md @@ -1,7 +1,7 @@ # how to jmh -[jmh] (Java Benchmark Harness) is a tool for running benchmarks and reporting results. +[jmh](https://github.com/openjdk/jmh) is a tool for running benchmarks and reporting results. opentelemetry-java has a lot of micro benchmarks. They live inside `jmh` directories in the appropriate module. 
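The jmh note above says the micro benchmarks live in `jmh` source directories inside each module. A minimal sketch of the shape such a benchmark takes; the class name, package, and measured operation are illustrative, not an actual benchmark from the repository:

```java
import io.opentelemetry.api.trace.Tracer;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;

@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
public class SpanStartEndBenchmark {

  private final Tracer tracer = SdkTracerProvider.builder().build().get("jmh-benchmark");

  @Benchmark
  public void startAndEndSpan() {
    // The benchmark method is the unit JMH measures repeatedly.
    tracer.spanBuilder("benchmark").startSpan().end();
  }
}
```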
diff --git a/docs/sdk-configuration.md b/docs/sdk-configuration-design.md similarity index 99% rename from docs/sdk-configuration.md rename to docs/sdk-configuration-design.md index 9476ad5a572..2ecc7f4341c 100644 --- a/docs/sdk-configuration.md +++ b/docs/sdk-configuration-design.md @@ -1,7 +1,8 @@ # 🚨 Archival Use Only! 🚨 > This is a historical design document and is not updated with evolving APIs. For up-to-date > examples of SDK configuration see the -> [documentation](https://opentelemetry.io/docs/java/manual_instrumentation/). +> [manual instrumentation documentation](https://opentelemetry.io/docs/languages/java/instrumentation/) +> and the [SDK configuration documentation](https://opentelemetry.io/docs/languages/java/configuration/). > This document is maintained here for historical purposes only, and it is intended to > help future readers to understand the rationale behind certain design decisions. diff --git a/exporters/common/build.gradle.kts b/exporters/common/build.gradle.kts index d6cb5ebe86f..8d5f38224e7 100644 --- a/exporters/common/build.gradle.kts +++ b/exporters/common/build.gradle.kts @@ -11,8 +11,11 @@ otelJava.moduleName.set("io.opentelemetry.exporter.internal") val versions: Map by project dependencies { api(project(":api:all")) + api(project(":sdk-extensions:autoconfigure-spi")) + compileOnly(project(":api:incubator")) compileOnly(project(":sdk:common")) + compileOnly(project(":exporters:common:compile-stub")) compileOnly("org.codehaus.mojo:animal-sniffer-annotations") @@ -21,6 +24,8 @@ dependencies { // We include helpers shared by gRPC exporters but do not want to impose these // dependency on all of our consumers. compileOnly("com.fasterxml.jackson.core:jackson-core") + // sun.misc.Unsafe from the JDK isn't found by the compiler, we provide our own trimmed down + // version that we can compile against. compileOnly("io.grpc:grpc-stub") testImplementation(project(":sdk:common")) @@ -30,6 +35,7 @@ dependencies { testImplementation("org.skyscreamer:jsonassert") testImplementation("com.google.api.grpc:proto-google-common-protos") testImplementation("io.grpc:grpc-testing") + testImplementation("edu.berkeley.cs.jqf:jqf-fuzz") testRuntimeOnly("io.grpc:grpc-netty-shaded") } @@ -62,6 +68,9 @@ testing { } } } + suites { + register("testWithoutUnsafe") {} + } } tasks { diff --git a/exporters/common/compile-stub/build.gradle.kts b/exporters/common/compile-stub/build.gradle.kts new file mode 100644 index 00000000000..f93bd1883c9 --- /dev/null +++ b/exporters/common/compile-stub/build.gradle.kts @@ -0,0 +1,6 @@ +plugins { + id("otel.java-conventions") +} + +description = "OpenTelemetry Exporter Compile Stub" +otelJava.moduleName.set("io.opentelemetry.exporter.internal.compile-stub") diff --git a/exporters/common/compile-stub/src/main/java/sun/misc/Unsafe.java b/exporters/common/compile-stub/src/main/java/sun/misc/Unsafe.java new file mode 100644 index 00000000000..48b37ad371f --- /dev/null +++ b/exporters/common/compile-stub/src/main/java/sun/misc/Unsafe.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package sun.misc; + +import java.lang.reflect.Field; + +/** + * sun.misc.Unsafe from the JDK isn't found by the compiler, we provide our own trimmed down version + * that we can compile against. 
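The compile-stub module added above lets exporter code compile against a trimmed-down `sun.misc.Unsafe` while the real JDK class is used at runtime (the stub is `compileOnly`, so it never ships). A sketch of the usual access pattern under that arrangement; the holder class is illustrative, and `theUnsafe` is the JDK's conventional field name rather than something declared by the stub:

```java
import java.lang.reflect.Field;
import sun.misc.Unsafe; // resolves to the stub at compile time, to the real JDK class at runtime

final class UnsafeHolder {
  static final Unsafe UNSAFE = load();

  private static Unsafe load() {
    try {
      // Reflection is by name, so the stub does not need to declare this field.
      Field field = Unsafe.class.getDeclaredField("theUnsafe");
      field.setAccessible(true);
      return (Unsafe) field.get(null);
    } catch (ReflectiveOperationException e) {
      throw new IllegalStateException("sun.misc.Unsafe is not available", e);
    }
  }

  private UnsafeHolder() {}
}
```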
+ */ +public class Unsafe { + + public long objectFieldOffset(Field f) { + return -1; + } + + public Object getObject(Object o, long offset) { + return null; + } + + public byte getByte(Object o, long offset) { + return 0; + } + + public int arrayBaseOffset(Class arrayClass) { + return 0; + } + + public long getLong(Object o, long offset) { + return 0; + } +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/ExporterBuilderUtil.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/ExporterBuilderUtil.java index d45bdb09283..9e93cc38ab3 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/ExporterBuilderUtil.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/ExporterBuilderUtil.java @@ -5,8 +5,21 @@ package io.opentelemetry.exporter.internal; +import static io.opentelemetry.sdk.metrics.Aggregation.explicitBucketHistogram; + +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.internal.aggregator.AggregationUtil; import java.net.URI; import java.net.URISyntaxException; +import java.util.Locale; +import java.util.function.Consumer; /** * Utilities for exporter builders. @@ -33,5 +46,83 @@ public static URI validateEndpoint(String endpoint) { return uri; } + /** Invoke the {@code memoryModeConsumer} with the configured {@link MemoryMode}. */ + public static void configureExporterMemoryMode( + ConfigProperties config, Consumer memoryModeConsumer) { + String memoryModeStr = config.getString("otel.java.exporter.memory_mode"); + if (memoryModeStr == null) { + return; + } + MemoryMode memoryMode; + try { + memoryMode = MemoryMode.valueOf(memoryModeStr.toUpperCase(Locale.ROOT)); + } catch (IllegalArgumentException e) { + throw new ConfigurationException("Unrecognized memory mode: " + memoryModeStr, e); + } + memoryModeConsumer.accept(memoryMode); + } + + /** + * Invoke the {@code defaultAggregationSelectorConsumer} with the configured {@link + * DefaultAggregationSelector}. + */ + public static void configureHistogramDefaultAggregation( + String defaultHistogramAggregation, + Consumer defaultAggregationSelectorConsumer) { + if (AggregationUtil.aggregationName(Aggregation.base2ExponentialBucketHistogram()) + .equalsIgnoreCase(defaultHistogramAggregation)) { + defaultAggregationSelectorConsumer.accept( + DefaultAggregationSelector.getDefault() + .with(InstrumentType.HISTOGRAM, Aggregation.base2ExponentialBucketHistogram())); + } else if (!AggregationUtil.aggregationName(explicitBucketHistogram()) + .equalsIgnoreCase(defaultHistogramAggregation)) { + throw new ConfigurationException( + "Unrecognized default histogram aggregation: " + defaultHistogramAggregation); + } + } + + /** + * Invoke the {@code aggregationTemporalitySelectorConsumer} with the configured {@link + * AggregationTemporality}. 
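`ExporterBuilderUtil` gains helpers (this one and the OTLP-specific ones that follow) that parse `otel.java.exporter.memory_mode`, the OTLP temporality preference, and the default histogram aggregation, then hand the result to a consumer. A sketch of how autoconfiguration code might wire them into a metric exporter builder; the concrete OTLP gRPC builder and its setter names are assumptions drawn from the public exporter API, not part of this change:

```java
import io.opentelemetry.exporter.internal.ExporterBuilderUtil;
import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter;
import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder;
import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;

final class OtlpMetricExporterWiring {

  static OtlpGrpcMetricExporterBuilder apply(ConfigProperties config) {
    OtlpGrpcMetricExporterBuilder builder = OtlpGrpcMetricExporter.builder();
    // otel.java.exporter.memory_mode
    ExporterBuilderUtil.configureExporterMemoryMode(config, builder::setMemoryMode);
    // otel.exporter.otlp.metrics.temporality.preference
    ExporterBuilderUtil.configureOtlpAggregationTemporality(
        config, builder::setAggregationTemporalitySelector);
    // otel.exporter.otlp.metrics.default.histogram.aggregation
    ExporterBuilderUtil.configureOtlpHistogramDefaultAggregation(
        config, builder::setDefaultAggregationSelector);
    return builder;
  }

  private OtlpMetricExporterWiring() {}
}
```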
+ */ + public static void configureOtlpAggregationTemporality( + ConfigProperties config, + Consumer aggregationTemporalitySelectorConsumer) { + String temporalityStr = config.getString("otel.exporter.otlp.metrics.temporality.preference"); + if (temporalityStr == null) { + return; + } + AggregationTemporalitySelector temporalitySelector; + switch (temporalityStr.toLowerCase(Locale.ROOT)) { + case "cumulative": + temporalitySelector = AggregationTemporalitySelector.alwaysCumulative(); + break; + case "delta": + temporalitySelector = AggregationTemporalitySelector.deltaPreferred(); + break; + case "lowmemory": + temporalitySelector = AggregationTemporalitySelector.lowMemory(); + break; + default: + throw new ConfigurationException("Unrecognized aggregation temporality: " + temporalityStr); + } + aggregationTemporalitySelectorConsumer.accept(temporalitySelector); + } + + /** + * Invoke the {@code defaultAggregationSelectorConsumer} with the configured {@link + * DefaultAggregationSelector}. + */ + public static void configureOtlpHistogramDefaultAggregation( + ConfigProperties config, + Consumer defaultAggregationSelectorConsumer) { + String defaultHistogramAggregation = + config.getString("otel.exporter.otlp.metrics.default.histogram.aggregation"); + if (defaultHistogramAggregation != null) { + configureHistogramDefaultAggregation( + defaultHistogramAggregation, defaultAggregationSelectorConsumer); + } + } + private ExporterBuilderUtil() {} } diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/FailedExportException.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/FailedExportException.java new file mode 100644 index 00000000000..3d229514108 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/FailedExportException.java @@ -0,0 +1,136 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal; + +import io.opentelemetry.exporter.internal.grpc.GrpcResponse; +import io.opentelemetry.exporter.internal.http.HttpSender; +import javax.annotation.Nullable; + +/** + * Represents the failure of a gRPC or HTTP exporter. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public abstract class FailedExportException extends Exception { + + private static final long serialVersionUID = 6988924855140178789L; + + private FailedExportException(@Nullable Throwable cause) { + super(cause); + } + + /** Indicates an HTTP export failed after receiving a response from the server. */ + public static HttpExportException httpFailedWithResponse(HttpSender.Response response) { + return new HttpExportException(response, null); + } + + /** Indicates an HTTP export failed exceptionally without receiving a response from the server. */ + public static HttpExportException httpFailedExceptionally(Throwable cause) { + return new HttpExportException(null, cause); + } + + /** Indicates a gRPC export failed after receiving a response from the server. */ + public static GrpcExportException grpcFailedWithResponse(GrpcResponse response) { + return new GrpcExportException(response, null); + } + + /** Indicates a gRPC export failed exceptionally without receiving a response from the server. */ + public static GrpcExportException grpcFailedExceptionally(Throwable cause) { + return new GrpcExportException(null, cause); + } + + /** Returns true if the export failed with a response from the server. */ + public abstract boolean failedWithResponse(); + + /** + * Represents the failure of an HTTP exporter. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ + public static final class HttpExportException extends FailedExportException { + + private static final long serialVersionUID = -6787390183017184775L; + + @Nullable private final HttpSender.Response response; + @Nullable private final Throwable cause; + + private HttpExportException(@Nullable HttpSender.Response response, @Nullable Throwable cause) { + super(cause); + this.response = response; + this.cause = cause; + } + + @Override + public boolean failedWithResponse() { + return response != null; + } + + /** + * Returns the response if the export failed with a response from the server, or null if the + * export failed exceptionally with no response. + */ + @Nullable + public HttpSender.Response getResponse() { + return response; + } + + /** + * Returns the exceptional cause of failure, or null if the export failed with a response from + * the server. + */ + @Nullable + @Override + public Throwable getCause() { + return cause; + } + } + + /** + * Represents the failure of a gRPC exporter. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ + public static final class GrpcExportException extends FailedExportException { + + private static final long serialVersionUID = -9157548250286695364L; + + @Nullable private final GrpcResponse response; + @Nullable private final Throwable cause; + + private GrpcExportException(@Nullable GrpcResponse response, @Nullable Throwable cause) { + super(cause); + this.response = response; + this.cause = cause; + } + + @Override + public boolean failedWithResponse() { + return response != null; + } + + /** + * Returns the response if the export failed with a response from the server, or null if the + * export failed exceptionally with no response. + */ + @Nullable + public GrpcResponse getResponse() { + return response; + } + + /** + * Returns the exceptional cause of failure, or null if the export failed with a response from + * the server. + */ + @Nullable + @Override + public Throwable getCause() { + return cause; + } + } +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/IncubatingExporterBuilderUtil.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/IncubatingExporterBuilderUtil.java new file mode 100644 index 00000000000..f5992879cf8 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/IncubatingExporterBuilderUtil.java @@ -0,0 +1,93 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal; + +import static io.opentelemetry.sdk.metrics.Aggregation.explicitBucketHistogram; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.internal.aggregator.AggregationUtil; +import java.util.Locale; +import java.util.function.Consumer; + +/** + * Utilities for exporter builders. + * + *
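`FailedExportException` defined above is surfaced through `CompletableResultCode.failExceptionally(...)` in the gRPC and HTTP exporters later in this diff. A sketch of how a caller could inspect the failure; it assumes `CompletableResultCode` exposes the exception via `getFailureThrowable()`, and the logging is illustrative:

```java
import io.opentelemetry.exporter.internal.FailedExportException;
import io.opentelemetry.sdk.common.CompletableResultCode;

final class ExportFailureLogging {

  static void logIfRejected(CompletableResultCode result) {
    result.whenComplete(
        () -> {
          Throwable failure = result.getFailureThrowable();
          if (failure instanceof FailedExportException.HttpExportException) {
            FailedExportException.HttpExportException e =
                (FailedExportException.HttpExportException) failure;
            if (e.failedWithResponse()) {
              System.err.println("Server rejected export, HTTP " + e.getResponse().statusCode());
            } else {
              System.err.println("Export failed before a response was received: " + e.getCause());
            }
          }
        });
  }

  private ExportFailureLogging() {}
}
```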
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class IncubatingExporterBuilderUtil { + + /** Invoke the {@code memoryModeConsumer} with the configured {@link MemoryMode}. */ + public static void configureExporterMemoryMode( + DeclarativeConfigProperties config, Consumer memoryModeConsumer) { + String memoryModeStr = config.getString("memory_mode"); + if (memoryModeStr == null) { + return; + } + MemoryMode memoryMode; + try { + memoryMode = MemoryMode.valueOf(memoryModeStr.toUpperCase(Locale.ROOT)); + } catch (IllegalArgumentException e) { + throw new ConfigurationException("Unrecognized memory_mode: " + memoryModeStr, e); + } + memoryModeConsumer.accept(memoryMode); + } + + public static void configureOtlpAggregationTemporality( + DeclarativeConfigProperties config, + Consumer aggregationTemporalitySelectorConsumer) { + String temporalityStr = config.getString("temporality_preference"); + if (temporalityStr == null) { + return; + } + AggregationTemporalitySelector temporalitySelector; + switch (temporalityStr.toLowerCase(Locale.ROOT)) { + case "cumulative": + temporalitySelector = AggregationTemporalitySelector.alwaysCumulative(); + break; + case "delta": + temporalitySelector = AggregationTemporalitySelector.deltaPreferred(); + break; + case "lowmemory": + temporalitySelector = AggregationTemporalitySelector.lowMemory(); + break; + default: + throw new ConfigurationException("Unrecognized temporality_preference: " + temporalityStr); + } + aggregationTemporalitySelectorConsumer.accept(temporalitySelector); + } + + /** + * Invoke the {@code defaultAggregationSelectorConsumer} with the configured {@link + * DefaultAggregationSelector}. + */ + public static void configureOtlpHistogramDefaultAggregation( + DeclarativeConfigProperties config, + Consumer defaultAggregationSelectorConsumer) { + String defaultHistogramAggregation = config.getString("default_histogram_aggregation"); + if (defaultHistogramAggregation == null) { + return; + } + if (AggregationUtil.aggregationName(Aggregation.base2ExponentialBucketHistogram()) + .equalsIgnoreCase(defaultHistogramAggregation)) { + defaultAggregationSelectorConsumer.accept( + DefaultAggregationSelector.getDefault() + .with(InstrumentType.HISTOGRAM, Aggregation.base2ExponentialBucketHistogram())); + } else if (!AggregationUtil.aggregationName(explicitBucketHistogram()) + .equalsIgnoreCase(defaultHistogramAggregation)) { + throw new ConfigurationException( + "Unrecognized default_histogram_aggregation: " + defaultHistogramAggregation); + } + } + + private IncubatingExporterBuilderUtil() {} +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/InstrumentationUtil.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/InstrumentationUtil.java new file mode 100644 index 00000000000..9a88fe85060 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/InstrumentationUtil.java @@ -0,0 +1,40 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal; + +import io.opentelemetry.context.Context; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. + * + * @deprecated use {@link io.opentelemetry.api.internal.InstrumentationUtil} instead. This class + * should be removed once instrumentation does not refer to it anymore. 
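The `InstrumentationUtil` shim introduced here is deprecated in favor of `io.opentelemetry.api.internal.InstrumentationUtil`; its two methods (shown just below) only forward there. A sketch of the calling pattern those methods support, with a placeholder runnable standing in for the exporter's real HTTP call:

```java
import io.opentelemetry.api.internal.InstrumentationUtil;
import io.opentelemetry.context.Context;

final class SuppressionExample {

  // Exporter side: mark the outgoing request so HTTP auto-instrumentation can skip it.
  static void sendExportRequest(Runnable doHttpCall) {
    InstrumentationUtil.suppressInstrumentation(doHttpCall);
  }

  // Instrumentation side: consult the current context before starting a client span.
  static boolean shouldTrace() {
    return !InstrumentationUtil.shouldSuppressInstrumentation(Context.current());
  }

  private SuppressionExample() {}
}
```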
+ */ +@Deprecated +public final class InstrumentationUtil { + + private InstrumentationUtil() {} + + /** + * Adds a Context boolean key that will allow to identify HTTP calls coming from OTel exporters. + * The key later be checked by an automatic instrumentation to avoid tracing OTel exporter's + * calls. + */ + public static void suppressInstrumentation(Runnable runnable) { + io.opentelemetry.api.internal.InstrumentationUtil.suppressInstrumentation(runnable); + } + + /** + * Checks if an automatic instrumentation should be suppressed with the provided Context. + * + * @return TRUE to suppress the automatic instrumentation, FALSE to continue with the + * instrumentation. + */ + public static boolean shouldSuppressInstrumentation(Context context) { + return io.opentelemetry.api.internal.InstrumentationUtil.shouldSuppressInstrumentation(context); + } +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/TlsUtil.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/TlsUtil.java index 4d177402241..17661a06eb1 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/TlsUtil.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/TlsUtil.java @@ -20,7 +20,6 @@ import java.security.cert.Certificate; import java.security.cert.CertificateException; import java.security.cert.CertificateFactory; -import java.security.cert.X509Certificate; import java.security.spec.InvalidKeySpecException; import java.security.spec.PKCS8EncodedKeySpec; import java.util.ArrayList; @@ -33,7 +32,6 @@ import javax.net.ssl.TrustManagerFactory; import javax.net.ssl.X509KeyManager; import javax.net.ssl.X509TrustManager; -import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement; /** * Utilities for working with TLS. 
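The `TlsUtil` change below swaps the manual certificate-parsing loops for `CertificateFactory.generateCertificates`, which reads several certificates from one stream and tolerates PEM files with explanatory text. A JDK-only sketch of that call; the file name is illustrative:

```java
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.util.Collection;

final class PemChainExample {

  static Collection<? extends Certificate> readChain() throws IOException, CertificateException {
    byte[] pem = Files.readAllBytes(Paths.get("ca-bundle.pem")); // illustrative path
    CertificateFactory cf = CertificateFactory.getInstance("X.509");
    // One call handles a bundle containing any number of PEM-encoded certificates.
    return cf.generateCertificates(new ByteArrayInputStream(pem));
  }

  private PemChainExample() {}
}
```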
@@ -78,14 +76,11 @@ public static X509KeyManager keyManager(byte[] privateKeyPem, byte[] certificate PrivateKey key = generatePrivateKey(keySpec, SUPPORTED_KEY_FACTORIES); CertificateFactory cf = CertificateFactory.getInstance("X.509"); - - List chain = new ArrayList<>(); - ByteArrayInputStream is = new ByteArrayInputStream(certificatePem); - while (is.available() > 0) { - chain.add(cf.generateCertificate(is)); - } - + // pass the input stream to generateCertificates to get a list of certificates + // generateCertificates can handle multiple certificates in a single input stream + // including PEM files with explanatory text + List chain = (List) cf.generateCertificates(is); ks.setKeyEntry("trusted", key, "".toCharArray(), chain.toArray(new Certificate[] {})); KeyManagerFactory kmf = @@ -127,9 +122,13 @@ public static X509TrustManager trustManager(byte[] trustedCertificatesPem) throw ByteArrayInputStream is = new ByteArrayInputStream(trustedCertificatesPem); CertificateFactory factory = CertificateFactory.getInstance("X.509"); int i = 0; - while (is.available() > 0) { - X509Certificate cert = (X509Certificate) factory.generateCertificate(is); - ks.setCertificateEntry("cert_" + i, cert); + // pass the input stream to generateCertificates to get a list of certificates + // generateCertificates can handle multiple certificates in a single input stream + // including PEM files with explanatory text + List certificates = + (List) factory.generateCertificates(is); + for (Certificate certificate : certificates) { + ks.setCertificateEntry("cert_" + i, certificate); i++; } @@ -142,9 +141,6 @@ public static X509TrustManager trustManager(byte[] trustedCertificatesPem) throw } } - // We catch linkage error to provide a better exception message on Android. - // https://github.com/open-telemetry/opentelemetry-java/issues/4533 - @IgnoreJRERequirement // Visible for testing static byte[] decodePem(byte[] pem) { String pemStr = new String(pem, StandardCharsets.UTF_8).trim(); @@ -157,12 +153,6 @@ static byte[] decodePem(byte[] pem) { pemStr.substring(PEM_KEY_HEADER.length(), pemStr.length() - PEM_KEY_FOOTER.length()); String content = contentWithNewLines.replaceAll("\\s", ""); - try { - return Base64.getDecoder().decode(content); - } catch (LinkageError unused) { - throw new IllegalArgumentException( - "PEM private keys are currently not supported on Android. " - + "You may try a key encoded as DER."); - } + return Base64.getDecoder().decode(content); } } diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/auth/Authenticator.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/auth/Authenticator.java deleted file mode 100644 index 7ad2547ee20..00000000000 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/auth/Authenticator.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.internal.auth; - -import io.opentelemetry.exporter.internal.grpc.GrpcExporterBuilder; -import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import java.lang.reflect.Field; -import java.util.Map; - -/** - * This class is internal and is hence not for public use. Its APIs are unstable and can change at - * any time. - * - *
<p>
Allow users of OTLP-OkHttp exporters to add support for authentication. - */ -public interface Authenticator { - - /** - * Method called by the exporter to get headers to be used on a request that requires - * authentication. - * - * @return Headers to add to the request - */ - Map getHeaders(); - - /** - * Reflectively access a {@link GrpcExporterBuilder}, or {@link HttpExporterBuilder} instance in - * field called "delegate" of the instance, and set the {@link Authenticator}. - * - * @param builder export builder to modify - * @param authenticator authenticator to set on builder - * @throws IllegalArgumentException if the instance does not contain a field called "delegate" of - * a supported type. - */ - static void setAuthenticatorOnDelegate(Object builder, Authenticator authenticator) { - try { - Field field = builder.getClass().getDeclaredField("delegate"); - field.setAccessible(true); - Object value = field.get(builder); - if (value instanceof GrpcExporterBuilder) { - throw new IllegalArgumentException("GrpcExporterBuilder not supported yet."); - } else if (value instanceof HttpExporterBuilder) { - ((HttpExporterBuilder) value).setAuthenticator(authenticator); - } else { - throw new IllegalArgumentException( - "Delegate field is not type DefaultGrpcExporterBuilder or OkHttpGrpcExporterBuilder."); - } - } catch (NoSuchFieldException | IllegalAccessException e) { - throw new IllegalArgumentException("Unable to access delegate reflectively.", e); - } - } -} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/Compressor.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/Compressor.java new file mode 100644 index 00000000000..71894cc9d4a --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/Compressor.java @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.compression; + +import java.io.IOException; +import java.io.OutputStream; +import javax.annotation.concurrent.ThreadSafe; + +/** + * An abstraction for compressing messages. Implementation MUST be thread safe as the same instance + * is expected to be used many times and concurrently. Instances are usually singletons. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@ThreadSafe +public interface Compressor { + + /** + * The name of the compressor encoding. + * + *
<p>
Used to identify the compressor during configuration and to populate the {@code + * Content-Encoding} header. + */ + String getEncoding(); + + /** Wrap the {@code outputStream} with a compressing output stream. */ + OutputStream compress(OutputStream outputStream) throws IOException; +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/CompressorProvider.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/CompressorProvider.java new file mode 100644 index 00000000000..6b4518f1ea0 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/CompressorProvider.java @@ -0,0 +1,18 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.compression; + +/** + * A service provider interface (SPI) for providing {@link Compressor}s. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public interface CompressorProvider { + + /** Return the {@link Compressor}. */ + Compressor getInstance(); +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/CompressorUtil.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/CompressorUtil.java new file mode 100644 index 00000000000..9748ea508ad --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/CompressorUtil.java @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.compression; + +import static io.opentelemetry.api.internal.Utils.checkArgument; +import static java.util.stream.Collectors.joining; + +import java.util.HashMap; +import java.util.Map; +import java.util.ServiceLoader; +import java.util.Set; +import javax.annotation.Nullable; + +/** + * Utilities for resolving SPI {@link Compressor}s. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + * @see CompressorProvider + */ +public final class CompressorUtil { + + private static final Map compressorRegistry = buildCompressorRegistry(); + + private CompressorUtil() {} + + /** + * Validate that the {@code compressionMethod} is "none" or matches a registered compressor. + * + * @return {@code null} if {@code compressionMethod} is "none" or the registered compressor + * @throws IllegalArgumentException if no match is found + */ + @Nullable + public static Compressor validateAndResolveCompressor(String compressionMethod) { + Set supportedEncodings = compressorRegistry.keySet(); + Compressor compressor = compressorRegistry.get(compressionMethod); + checkArgument( + "none".equals(compressionMethod) || compressor != null, + "Unsupported compressionMethod. Compression method must be \"none\" or one of: " + + supportedEncodings.stream().collect(joining(",", "[", "]"))); + return compressor; + } + + private static Map buildCompressorRegistry() { + Map compressors = new HashMap<>(); + for (CompressorProvider spi : + ServiceLoader.load(CompressorProvider.class, CompressorUtil.class.getClassLoader())) { + Compressor compressor = spi.getInstance(); + compressors.put(compressor.getEncoding(), compressor); + } + // Hardcode gzip compressor + compressors.put(GzipCompressor.getInstance().getEncoding(), GzipCompressor.getInstance()); + return compressors; + } +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/GzipCompressor.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/GzipCompressor.java new file mode 100644 index 00000000000..7395fdb41b1 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/compression/GzipCompressor.java @@ -0,0 +1,37 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.compression; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.zip.GZIPOutputStream; + +/** + * Gzip {@link Compressor}. + * + *
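`CompressorUtil` above resolves additional codecs through the `CompressorProvider` SPI (gzip is hardcoded). A sketch of what a third-party codec could look like, using the JDK's deflate support; the package and class names are hypothetical, and the provider would be registered in `META-INF/services/io.opentelemetry.exporter.internal.compression.CompressorProvider`:

```java
package com.example.exporter.compression; // hypothetical package

import io.opentelemetry.exporter.internal.compression.Compressor;
import io.opentelemetry.exporter.internal.compression.CompressorProvider;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.DeflaterOutputStream;

public final class DeflateCompressor implements Compressor {

  @Override
  public String getEncoding() {
    return "deflate"; // used for configuration and the Content-Encoding header
  }

  @Override
  public OutputStream compress(OutputStream outputStream) throws IOException {
    return new DeflaterOutputStream(outputStream);
  }

  /** The SPI entry point that CompressorUtil discovers via ServiceLoader. */
  public static final class Provider implements CompressorProvider {
    @Override
    public Compressor getInstance() {
      return new DeflateCompressor();
    }
  }
}
```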
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class GzipCompressor implements Compressor { + + private static final GzipCompressor INSTANCE = new GzipCompressor(); + + private GzipCompressor() {} + + public static GzipCompressor getInstance() { + return INSTANCE; + } + + @Override + public String getEncoding() { + return "gzip"; + } + + @Override + public OutputStream compress(OutputStream outputStream) throws IOException { + return new GZIPOutputStream(outputStream); + } +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcExporter.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcExporter.java index 6a74ba5df8a..128ddc436d5 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcExporter.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcExporter.java @@ -10,6 +10,7 @@ import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.exporter.internal.ExporterMetrics; +import io.opentelemetry.exporter.internal.FailedExportException; import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.internal.ThrottlingLogger; @@ -60,50 +61,68 @@ public CompletableResultCode export(T exportRequest, int numItems) { grpcSender.send( exportRequest, - () -> { - exporterMetrics.addSuccess(numItems); - result.succeed(); - }, - (response, throwable) -> { - exporterMetrics.addFailed(numItems); - switch (response.grpcStatusValue()) { - case GRPC_STATUS_UNIMPLEMENTED: - if (loggedUnimplemented.compareAndSet(false, true)) { - GrpcExporterUtil.logUnimplemented( - internalLogger, type, response.grpcStatusDescription()); - } - break; - case GRPC_STATUS_UNAVAILABLE: - logger.log( - Level.SEVERE, - "Failed to export " - + type - + "s. Server is UNAVAILABLE. " - + "Make sure your collector is running and reachable from this network. " - + "Full error message:" - + response.grpcStatusDescription()); - break; - default: - logger.log( - Level.WARNING, - "Failed to export " - + type - + "s. Server responded with gRPC status code " - + response.grpcStatusValue() - + ". Error message: " - + response.grpcStatusDescription()); - break; - } - if (logger.isLoggable(Level.FINEST)) { - logger.log( - Level.FINEST, "Failed to export " + type + "s. Details follow: " + throwable); - } - result.fail(); - }); + grpcResponse -> onResponse(result, numItems, grpcResponse), + throwable -> onError(result, numItems, throwable)); return result; } + private void onResponse(CompletableResultCode result, int numItems, GrpcResponse grpcResponse) { + int statusCode = grpcResponse.grpcStatusValue(); + + if (statusCode == 0) { + exporterMetrics.addSuccess(numItems); + result.succeed(); + return; + } + + exporterMetrics.addFailed(numItems); + switch (statusCode) { + case GRPC_STATUS_UNIMPLEMENTED: + if (loggedUnimplemented.compareAndSet(false, true)) { + GrpcExporterUtil.logUnimplemented( + internalLogger, type, grpcResponse.grpcStatusDescription()); + } + break; + case GRPC_STATUS_UNAVAILABLE: + logger.log( + Level.SEVERE, + "Failed to export " + + type + + "s. Server is UNAVAILABLE. " + + "Make sure your collector is running and reachable from this network. " + + "Full error message:" + + grpcResponse.grpcStatusDescription()); + break; + default: + logger.log( + Level.WARNING, + "Failed to export " + + type + + "s. 
Server responded with gRPC status code " + + statusCode + + ". Error message: " + + grpcResponse.grpcStatusDescription()); + break; + } + result.failExceptionally(FailedExportException.grpcFailedWithResponse(grpcResponse)); + } + + private void onError(CompletableResultCode result, int numItems, Throwable e) { + exporterMetrics.addFailed(numItems); + logger.log( + Level.SEVERE, + "Failed to export " + + type + + "s. The request could not be executed. Error message: " + + e.getMessage(), + e); + if (logger.isLoggable(Level.FINEST)) { + logger.log(Level.FINEST, "Failed to export " + type + "s. Details follow: " + e); + } + result.failExceptionally(FailedExportException.grpcFailedExceptionally(e)); + } + public CompletableResultCode shutdown() { if (!isShutdown.compareAndSet(false, true)) { logger.log(Level.INFO, "Calling shutdown() multiple times."); diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcExporterBuilder.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcExporterBuilder.java index 31eb7745d54..d5cd41d1066 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcExporterBuilder.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcExporterBuilder.java @@ -12,14 +12,20 @@ import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.exporter.internal.ExporterBuilderUtil; import io.opentelemetry.exporter.internal.TlsConfigHelper; +import io.opentelemetry.exporter.internal.compression.Compressor; import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.net.URI; import java.time.Duration; +import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.ServiceLoader; import java.util.StringJoiner; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Supplier; @@ -38,6 +44,8 @@ @SuppressWarnings("JavadocMethod") public class GrpcExporterBuilder { + public static final long DEFAULT_CONNECT_TIMEOUT_SECS = 10; + private static final Logger LOGGER = Logger.getLogger(GrpcExporterBuilder.class.getName()); private final String exporterName; @@ -47,12 +55,16 @@ public class GrpcExporterBuilder { grpcStubFactory; private long timeoutNanos; + private long connectTimeoutNanos = TimeUnit.SECONDS.toNanos(DEFAULT_CONNECT_TIMEOUT_SECS); private URI endpoint; - private boolean compressionEnabled = false; - private final Map headers = new HashMap<>(); + @Nullable private Compressor compressor; + private final Map constantHeaders = new HashMap<>(); + private Supplier> headerSupplier = Collections::emptyMap; private TlsConfigHelper tlsConfigHelper = new TlsConfigHelper(); - @Nullable private RetryPolicy retryPolicy; + @Nullable private RetryPolicy retryPolicy = RetryPolicy.getDefault(); private Supplier meterProviderSupplier = GlobalOpenTelemetry::getMeterProvider; + private ClassLoader serviceClassLoader = GrpcExporterBuilder.class.getClassLoader(); + @Nullable private ExecutorService executorService; // Use Object type since gRPC may not be on the classpath. 
@Nullable private Object grpcChannel; @@ -78,7 +90,7 @@ public GrpcExporterBuilder setChannel(ManagedChannel channel) { } public GrpcExporterBuilder setTimeout(long timeout, TimeUnit unit) { - timeoutNanos = unit.toNanos(timeout); + timeoutNanos = timeout == 0 ? Long.MAX_VALUE : unit.toNanos(timeout); return this; } @@ -86,13 +98,18 @@ public GrpcExporterBuilder setTimeout(Duration timeout) { return setTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); } + public GrpcExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + connectTimeoutNanos = timeout == 0 ? Long.MAX_VALUE : unit.toNanos(timeout); + return this; + } + public GrpcExporterBuilder setEndpoint(String endpoint) { this.endpoint = ExporterBuilderUtil.validateEndpoint(endpoint); return this; } - public GrpcExporterBuilder setCompression(String compressionMethod) { - this.compressionEnabled = compressionMethod.equals("gzip"); + public GrpcExporterBuilder setCompression(@Nullable Compressor compressor) { + this.compressor = compressor; return this; } @@ -113,18 +130,33 @@ public GrpcExporterBuilder setSslContext( return this; } - public GrpcExporterBuilder addHeader(String key, String value) { - headers.put(key, value); + public GrpcExporterBuilder addConstantHeader(String key, String value) { + constantHeaders.put(key, value); return this; } - public GrpcExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { + public GrpcExporterBuilder setHeadersSupplier(Supplier> headerSupplier) { + this.headerSupplier = headerSupplier; + return this; + } + + public GrpcExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } - public GrpcExporterBuilder setMeterProvider(MeterProvider meterProvider) { - this.meterProviderSupplier = () -> meterProvider; + public GrpcExporterBuilder setMeterProvider(Supplier meterProviderSupplier) { + this.meterProviderSupplier = meterProviderSupplier; + return this; + } + + public GrpcExporterBuilder setServiceClassLoader(ClassLoader servieClassLoader) { + this.serviceClassLoader = servieClassLoader; + return this; + } + + public GrpcExporterBuilder setExecutorService(ExecutorService executorService) { + this.executorService = executorService; return this; } @@ -140,9 +172,11 @@ public GrpcExporterBuilder copy() { grpcEndpointPath); copy.timeoutNanos = timeoutNanos; + copy.connectTimeoutNanos = connectTimeoutNanos; copy.endpoint = endpoint; - copy.compressionEnabled = compressionEnabled; - copy.headers.putAll(headers); + copy.compressor = compressor; + copy.constantHeaders.putAll(constantHeaders); + copy.headerSupplier = headerSupplier; copy.tlsConfigHelper = tlsConfigHelper.copy(); if (retryPolicy != null) { copy.retryPolicy = retryPolicy.toBuilder().build(); @@ -153,19 +187,44 @@ public GrpcExporterBuilder copy() { } public GrpcExporter build() { + Supplier>> headerSupplier = + () -> { + Map> result = new HashMap<>(); + Map supplierResult = this.headerSupplier.get(); + if (supplierResult != null) { + supplierResult.forEach( + (key, value) -> result.put(key, Collections.singletonList(value))); + } + constantHeaders.forEach( + (key, value) -> + result.merge( + key, + Collections.singletonList(value), + (v1, v2) -> { + List merged = new ArrayList<>(v1); + merged.addAll(v2); + return merged; + })); + return result; + }; + + boolean isPlainHttp = "http".equals(endpoint.getScheme()); GrpcSenderProvider grpcSenderProvider = resolveGrpcSenderProvider(); GrpcSender grpcSender = grpcSenderProvider.createSender( - endpoint, - grpcEndpointPath, - 
compressionEnabled, - timeoutNanos, - headers, - grpcChannel, - grpcStubFactory, - retryPolicy, - tlsConfigHelper.getSslContext(), - tlsConfigHelper.getTrustManager()); + GrpcSenderConfig.create( + endpoint, + grpcEndpointPath, + compressor, + timeoutNanos, + connectTimeoutNanos, + headerSupplier, + grpcChannel, + grpcStubFactory, + retryPolicy, + isPlainHttp ? null : tlsConfigHelper.getSslContext(), + isPlainHttp ? null : tlsConfigHelper.getTrustManager(), + executorService)); LOGGER.log(Level.FINE, "Using GrpcSender: " + grpcSender.getClass().getName()); return new GrpcExporter<>(exporterName, type, grpcSender, meterProviderSupplier); @@ -181,9 +240,16 @@ public String toString(boolean includePrefixAndSuffix) { joiner.add("endpoint=" + endpoint.toString()); joiner.add("endpointPath=" + grpcEndpointPath); joiner.add("timeoutNanos=" + timeoutNanos); - joiner.add("compressionEnabled=" + compressionEnabled); + joiner.add("connectTimeoutNanos=" + connectTimeoutNanos); + joiner.add( + "compressorEncoding=" + + Optional.ofNullable(compressor).map(Compressor::getEncoding).orElse(null)); StringJoiner headersJoiner = new StringJoiner(", ", "Headers{", "}"); - headers.forEach((key, value) -> headersJoiner.add(key + "=OBFUSCATED")); + constantHeaders.forEach((key, value) -> headersJoiner.add(key + "=OBFUSCATED")); + Map headers = headerSupplier.get(); + if (headers != null) { + headers.forEach((key, value) -> headersJoiner.add(key + "=OBFUSCATED")); + } joiner.add("headers=" + headersJoiner); if (retryPolicy != null) { joiner.add("retryPolicy=" + retryPolicy); @@ -191,6 +257,10 @@ public String toString(boolean includePrefixAndSuffix) { if (grpcChannel != null) { joiner.add("grpcChannel=" + grpcChannel); } + joiner.add("serviceClassLoader=" + serviceClassLoader); + if (executorService != null) { + joiner.add("executorService=" + executorService); + } // Note: omit tlsConfigHelper because we can't log the configuration in any readable way // Note: omit meterProviderSupplier because we can't log the configuration in any readable way return joiner.toString(); @@ -217,10 +287,10 @@ public String toString() { * matching provider. If none match, throw {@link IllegalStateException}. 
* */ - private static GrpcSenderProvider resolveGrpcSenderProvider() { + private GrpcSenderProvider resolveGrpcSenderProvider() { Map grpcSenderProviders = new HashMap<>(); for (GrpcSenderProvider spi : - ServiceLoader.load(GrpcSenderProvider.class, GrpcExporterBuilder.class.getClassLoader())) { + ServiceLoader.load(GrpcSenderProvider.class, serviceClassLoader)) { grpcSenderProviders.put(spi.getClass().getName(), spi); } diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcResponse.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcResponse.java index 01ac2f53e6f..4602cbc0ba7 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcResponse.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcResponse.java @@ -25,4 +25,6 @@ public static GrpcResponse create(int grpcStatusValue, @Nullable String grpcStat @Nullable public abstract String grpcStatusDescription(); + + // TODO(jack-berg): add byte[] responseBody() throws IOException; } diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSender.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSender.java index d2dc05b16fd..ed85d630e42 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSender.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSender.java @@ -7,7 +7,7 @@ import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.sdk.common.CompletableResultCode; -import java.util.function.BiConsumer; +import java.util.function.Consumer; /** * An exporter of a messages encoded by {@link Marshaler} using the gRPC wire format. @@ -17,7 +17,7 @@ */ public interface GrpcSender { - void send(T request, Runnable onSuccess, BiConsumer onError); + void send(T request, Consumer onResponse, Consumer onError); /** Shutdown the sender. */ CompletableResultCode shutdown(); diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSenderConfig.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSenderConfig.java new file mode 100644 index 00000000000..8f4f546d6c4 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSenderConfig.java @@ -0,0 +1,91 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.grpc; + +import com.google.auto.value.AutoValue; +import io.grpc.Channel; +import io.opentelemetry.exporter.internal.compression.Compressor; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.sdk.common.export.RetryPolicy; +import java.net.URI; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.function.BiFunction; +import java.util.function.Supplier; +import javax.annotation.Nullable; +import javax.annotation.concurrent.Immutable; +import javax.net.ssl.SSLContext; +import javax.net.ssl.X509TrustManager; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. 
+ */ +@AutoValue +@Immutable +public abstract class GrpcSenderConfig { + + @SuppressWarnings("TooManyParameters") + public static GrpcSenderConfig create( + URI endpoint, + String endpointPath, + @Nullable Compressor compressor, + long timeoutNanos, + long connectTimeoutNanos, + Supplier>> headersSupplier, + @Nullable Object managedChannel, + Supplier>> stubFactory, + @Nullable RetryPolicy retryPolicy, + @Nullable SSLContext sslContext, + @Nullable X509TrustManager trustManager, + @Nullable ExecutorService executorService) { + return new AutoValue_GrpcSenderConfig<>( + endpoint, + endpointPath, + compressor, + timeoutNanos, + connectTimeoutNanos, + headersSupplier, + managedChannel, + stubFactory, + retryPolicy, + sslContext, + trustManager, + executorService); + } + + public abstract URI getEndpoint(); + + public abstract String getEndpointPath(); + + @Nullable + public abstract Compressor getCompressor(); + + public abstract long getTimeoutNanos(); + + public abstract long getConnectTimeoutNanos(); + + public abstract Supplier>> getHeadersSupplier(); + + @Nullable + public abstract Object getManagedChannel(); + + public abstract Supplier>> + getStubFactory(); + + @Nullable + public abstract RetryPolicy getRetryPolicy(); + + @Nullable + public abstract SSLContext getSslContext(); + + @Nullable + public abstract X509TrustManager getTrustManager(); + + @Nullable + public abstract ExecutorService getExecutorService(); +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSenderProvider.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSenderProvider.java index f3cacdf5409..5b2883cf066 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSenderProvider.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/grpc/GrpcSenderProvider.java @@ -5,16 +5,7 @@ package io.opentelemetry.exporter.internal.grpc; -import io.grpc.Channel; import io.opentelemetry.exporter.internal.marshal.Marshaler; -import io.opentelemetry.sdk.common.export.RetryPolicy; -import java.net.URI; -import java.util.Map; -import java.util.function.BiFunction; -import java.util.function.Supplier; -import javax.annotation.Nullable; -import javax.net.ssl.SSLContext; -import javax.net.ssl.X509TrustManager; /** * A service provider interface (SPI) for providing {@link GrpcSender}s backed by different client @@ -25,17 +16,6 @@ */ public interface GrpcSenderProvider { - /** Returns a {@link GrpcSender} configured with the provided parameters. */ - @SuppressWarnings("TooManyParameters") - GrpcSender createSender( - URI endpoint, - String endpointPath, - boolean compressionEnabled, - long timeoutNanos, - Map headers, - @Nullable Object managedChannel, - Supplier>> stubFactory, - @Nullable RetryPolicy retryPolicy, - @Nullable SSLContext sslContext, - @Nullable X509TrustManager trustManager); + /** Returns a {@link GrpcSender} configured with the provided config. 
*/ + GrpcSender createSender(GrpcSenderConfig grpcSenderConfig); } diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpExporter.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpExporter.java index f7af7150f74..5ab37416b97 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpExporter.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpExporter.java @@ -7,14 +7,13 @@ import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.exporter.internal.ExporterMetrics; +import io.opentelemetry.exporter.internal.FailedExportException; import io.opentelemetry.exporter.internal.grpc.GrpcExporterUtil; import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.internal.ThrottlingLogger; import java.io.IOException; -import java.io.OutputStream; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; import java.util.function.Supplier; import java.util.logging.Level; import java.util.logging.Logger; @@ -37,7 +36,6 @@ public final class HttpExporter { private final String type; private final HttpSender httpSender; private final ExporterMetrics exporterMetrics; - private final boolean exportAsJson; public HttpExporter( String exporterName, @@ -51,7 +49,6 @@ public HttpExporter( exportAsJson ? ExporterMetrics.createHttpJson(exporterName, type, meterProviderSupplier) : ExporterMetrics.createHttpProtobuf(exporterName, type, meterProviderSupplier); - this.exportAsJson = exportAsJson; } public CompletableResultCode export(T exportRequest, int numItems) { @@ -63,67 +60,60 @@ public CompletableResultCode export(T exportRequest, int numItems) { CompletableResultCode result = new CompletableResultCode(); - Consumer marshaler = - os -> { - try { - if (exportAsJson) { - exportRequest.writeJsonTo(os); - } else { - exportRequest.writeBinaryTo(os); - } - } catch (IOException e) { - throw new IllegalStateException(e); - } - }; - httpSender.send( - marshaler, + exportRequest, exportRequest.getBinarySerializedSize(), - httpResponse -> { - int statusCode = httpResponse.statusCode(); - - if (statusCode >= 200 && statusCode < 300) { - exporterMetrics.addSuccess(numItems); - result.succeed(); - return; - } - - exporterMetrics.addFailed(numItems); - - byte[] body; - try { - body = httpResponse.responseBody(); - } catch (IOException ex) { - throw new IllegalStateException(ex); - } - - String status = extractErrorStatus(httpResponse.statusMessage(), body); - - logger.log( - Level.WARNING, - "Failed to export " - + type - + "s. Server responded with HTTP status code " - + statusCode - + ". Error message: " - + status); - result.fail(); - }, - e -> { - exporterMetrics.addFailed(numItems); - logger.log( - Level.SEVERE, - "Failed to export " - + type - + "s. The request could not be executed. 
Full error message: " - + e.getMessage(), - e); - result.fail(); - }); + httpResponse -> onResponse(result, numItems, httpResponse), + throwable -> onError(result, numItems, throwable)); return result; } + private void onResponse( + CompletableResultCode result, int numItems, HttpSender.Response httpResponse) { + int statusCode = httpResponse.statusCode(); + + if (statusCode >= 200 && statusCode < 300) { + exporterMetrics.addSuccess(numItems); + result.succeed(); + return; + } + + exporterMetrics.addFailed(numItems); + + byte[] body = null; + try { + body = httpResponse.responseBody(); + } catch (IOException ex) { + logger.log(Level.FINE, "Unable to obtain response body", ex); + } + + String status = extractErrorStatus(httpResponse.statusMessage(), body); + + logger.log( + Level.WARNING, + "Failed to export " + + type + + "s. Server responded with HTTP status code " + + statusCode + + ". Error message: " + + status); + + result.failExceptionally(FailedExportException.httpFailedWithResponse(httpResponse)); + } + + private void onError(CompletableResultCode result, int numItems, Throwable e) { + exporterMetrics.addFailed(numItems); + logger.log( + Level.SEVERE, + "Failed to export " + + type + + "s. The request could not be executed. Full error message: " + + e.getMessage(), + e); + result.failExceptionally(FailedExportException.httpFailedExceptionally(e)); + } + public CompletableResultCode shutdown() { if (!isShutdown.compareAndSet(false, true)) { logger.log(Level.INFO, "Calling shutdown() multiple times."); diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpExporterBuilder.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpExporterBuilder.java index 5227a760eb5..631dd0f9a9e 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpExporterBuilder.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpExporterBuilder.java @@ -10,16 +10,20 @@ import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.exporter.internal.ExporterBuilderUtil; import io.opentelemetry.exporter.internal.TlsConfigHelper; -import io.opentelemetry.exporter.internal.auth.Authenticator; +import io.opentelemetry.exporter.internal.compression.Compressor; import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.net.URI; -import java.time.Duration; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.ServiceLoader; import java.util.StringJoiner; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import java.util.logging.Level; @@ -37,6 +41,7 @@ @SuppressWarnings("checkstyle:JavadocMethod") public final class HttpExporterBuilder { public static final long DEFAULT_TIMEOUT_SECS = 10; + public static final long DEFAULT_CONNECT_TIMEOUT_SECS = 10; private static final Logger LOGGER = Logger.getLogger(HttpExporterBuilder.class.getName()); @@ -46,14 +51,18 @@ public final class HttpExporterBuilder { private String endpoint; private long timeoutNanos = TimeUnit.SECONDS.toNanos(DEFAULT_TIMEOUT_SECS); - private boolean compressionEnabled = false; + @Nullable private Compressor compressor; + private long connectTimeoutNanos = 
TimeUnit.SECONDS.toNanos(DEFAULT_CONNECT_TIMEOUT_SECS); + @Nullable private ProxyOptions proxyOptions; private boolean exportAsJson = false; - @Nullable private Map headers; + private final Map constantHeaders = new HashMap<>(); + private Supplier> headerSupplier = Collections::emptyMap; private TlsConfigHelper tlsConfigHelper = new TlsConfigHelper(); - @Nullable private RetryPolicy retryPolicy; + @Nullable private RetryPolicy retryPolicy = RetryPolicy.getDefault(); private Supplier meterProviderSupplier = GlobalOpenTelemetry::getMeterProvider; - @Nullable private Authenticator authenticator; + private ClassLoader serviceClassLoader = HttpExporterBuilder.class.getClassLoader(); + @Nullable private ExecutorService executorService; public HttpExporterBuilder(String exporterName, String type, String defaultEndpoint) { this.exporterName = exporterName; @@ -63,12 +72,13 @@ public HttpExporterBuilder(String exporterName, String type, String defaultEndpo } public HttpExporterBuilder setTimeout(long timeout, TimeUnit unit) { - timeoutNanos = unit.toNanos(timeout); + timeoutNanos = timeout == 0 ? Long.MAX_VALUE : unit.toNanos(timeout); return this; } - public HttpExporterBuilder setTimeout(Duration timeout) { - return setTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); + public HttpExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + connectTimeoutNanos = timeout == 0 ? Long.MAX_VALUE : unit.toNanos(timeout); + return this; } public HttpExporterBuilder setEndpoint(String endpoint) { @@ -77,21 +87,18 @@ public HttpExporterBuilder setEndpoint(String endpoint) { return this; } - public HttpExporterBuilder setCompression(String compressionMethod) { - this.compressionEnabled = compressionMethod.equals("gzip"); + public HttpExporterBuilder setCompression(@Nullable Compressor compressor) { + this.compressor = compressor; return this; } - public HttpExporterBuilder addHeader(String key, String value) { - if (headers == null) { - headers = new HashMap<>(); - } - headers.put(key, value); + public HttpExporterBuilder addConstantHeaders(String key, String value) { + constantHeaders.put(key, value); return this; } - public HttpExporterBuilder setAuthenticator(Authenticator authenticator) { - this.authenticator = authenticator; + public HttpExporterBuilder setHeadersSupplier(Supplier> headerSupplier) { + this.headerSupplier = headerSupplier; return this; } @@ -112,16 +119,31 @@ public HttpExporterBuilder setSslContext( return this; } - public HttpExporterBuilder setMeterProvider(MeterProvider meterProvider) { - this.meterProviderSupplier = () -> meterProvider; + public HttpExporterBuilder setMeterProvider(Supplier meterProviderSupplier) { + this.meterProviderSupplier = meterProviderSupplier; return this; } - public HttpExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { + public HttpExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } + public HttpExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + this.proxyOptions = proxyOptions; + return this; + } + + public HttpExporterBuilder setServiceClassLoader(ClassLoader servieClassLoader) { + this.serviceClassLoader = servieClassLoader; + return this; + } + + public HttpExporterBuilder setExecutorService(ExecutorService executorService) { + this.executorService = executorService; + return this; + } + public HttpExporterBuilder exportAsJson() { this.exportAsJson = true; return this; @@ -132,36 +154,59 @@ public HttpExporterBuilder copy() { HttpExporterBuilder copy = new 
HttpExporterBuilder<>(exporterName, type, endpoint); copy.endpoint = endpoint; copy.timeoutNanos = timeoutNanos; + copy.connectTimeoutNanos = connectTimeoutNanos; copy.exportAsJson = exportAsJson; - copy.compressionEnabled = compressionEnabled; - if (headers != null) { - copy.headers = new HashMap<>(headers); - } + copy.compressor = compressor; + copy.constantHeaders.putAll(constantHeaders); + copy.headerSupplier = headerSupplier; copy.tlsConfigHelper = tlsConfigHelper.copy(); if (retryPolicy != null) { copy.retryPolicy = retryPolicy.toBuilder().build(); } copy.meterProviderSupplier = meterProviderSupplier; - copy.authenticator = authenticator; + copy.proxyOptions = proxyOptions; return copy; } public HttpExporter build() { - Map headers = this.headers == null ? Collections.emptyMap() : this.headers; - Supplier> headerSupplier = () -> headers; - + Supplier>> headerSupplier = + () -> { + Map> result = new HashMap<>(); + Map supplierResult = this.headerSupplier.get(); + if (supplierResult != null) { + supplierResult.forEach( + (key, value) -> result.put(key, Collections.singletonList(value))); + } + constantHeaders.forEach( + (key, value) -> + result.merge( + key, + Collections.singletonList(value), + (v1, v2) -> { + List merged = new ArrayList<>(v1); + merged.addAll(v2); + return merged; + })); + return result; + }; + + boolean isPlainHttp = endpoint.startsWith("http://"); HttpSenderProvider httpSenderProvider = resolveHttpSenderProvider(); HttpSender httpSender = httpSenderProvider.createSender( - endpoint, - compressionEnabled, - exportAsJson ? "application/json" : "application/x-protobuf", - timeoutNanos, - headerSupplier, - authenticator, - retryPolicy, - tlsConfigHelper.getSslContext(), - tlsConfigHelper.getTrustManager()); + HttpSenderConfig.create( + endpoint, + compressor, + exportAsJson, + exportAsJson ? "application/json" : "application/x-protobuf", + timeoutNanos, + connectTimeoutNanos, + headerSupplier, + proxyOptions, + retryPolicy, + isPlainHttp ? null : tlsConfigHelper.getSslContext(), + isPlainHttp ? 
null : tlsConfigHelper.getTrustManager(), + executorService)); LOGGER.log(Level.FINE, "Using HttpSender: " + httpSender.getClass().getName()); return new HttpExporter<>(exporterName, type, httpSender, meterProviderSupplier, exportAsJson); @@ -176,19 +221,28 @@ public String toString(boolean includePrefixAndSuffix) { joiner.add("type=" + type); joiner.add("endpoint=" + endpoint); joiner.add("timeoutNanos=" + timeoutNanos); - joiner.add("compressionEnabled=" + compressionEnabled); + joiner.add("proxyOptions=" + proxyOptions); + joiner.add( + "compressorEncoding=" + + Optional.ofNullable(compressor).map(Compressor::getEncoding).orElse(null)); + joiner.add("connectTimeoutNanos=" + connectTimeoutNanos); joiner.add("exportAsJson=" + exportAsJson); + StringJoiner headersJoiner = new StringJoiner(", ", "Headers{", "}"); + constantHeaders.forEach((key, value) -> headersJoiner.add(key + "=OBFUSCATED")); + Map headers = headerSupplier.get(); if (headers != null) { - StringJoiner headersJoiner = new StringJoiner(", ", "Headers{", "}"); headers.forEach((key, value) -> headersJoiner.add(key + "=OBFUSCATED")); - joiner.add("headers=" + headersJoiner); } + joiner.add("headers=" + headersJoiner); if (retryPolicy != null) { joiner.add("retryPolicy=" + retryPolicy); } + joiner.add("serviceClassLoader=" + serviceClassLoader); + if (executorService != null) { + joiner.add("executorService=" + executorService); + } // Note: omit tlsConfigHelper because we can't log the configuration in any readable way // Note: omit meterProviderSupplier because we can't log the configuration in any readable way - // Note: omit authenticator because we can't log the configuration in any readable way return joiner.toString(); } @@ -213,10 +267,10 @@ public String toString() { * matching provider. If none match, throw {@link IllegalStateException}. * */ - private static HttpSenderProvider resolveHttpSenderProvider() { + private HttpSenderProvider resolveHttpSenderProvider() { Map httpSenderProviders = new HashMap<>(); for (HttpSenderProvider spi : - ServiceLoader.load(HttpSenderProvider.class, HttpExporterBuilder.class.getClassLoader())) { + ServiceLoader.load(HttpSenderProvider.class, serviceClassLoader)) { httpSenderProviders.put(spi.getClass().getName(), spi); } diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSender.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSender.java index f7e21cb781a..aec50288ebd 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSender.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSender.java @@ -5,9 +5,9 @@ package io.opentelemetry.exporter.internal.http; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.sdk.common.CompletableResultCode; import java.io.IOException; -import java.io.OutputStream; import java.util.function.Consumer; /** @@ -33,7 +33,7 @@ public interface HttpSender { * @param onError the callback to invoke when the HTTP request could not be executed */ void send( - Consumer marshaler, + Marshaler marshaler, int contentLength, Consumer onResponse, Consumer onError); @@ -41,7 +41,12 @@ void send( /** Shutdown the sender. */ CompletableResultCode shutdown(); - /** The HTTP response. */ + /** + * The HTTP response. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ interface Response { /** The HTTP status code. */ diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSenderConfig.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSenderConfig.java new file mode 100644 index 00000000000..78b63afaf1d --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSenderConfig.java @@ -0,0 +1,87 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.http; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.internal.compression.Compressor; +import io.opentelemetry.sdk.common.export.ProxyOptions; +import io.opentelemetry.sdk.common.export.RetryPolicy; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; +import javax.annotation.Nullable; +import javax.annotation.concurrent.Immutable; +import javax.net.ssl.SSLContext; +import javax.net.ssl.X509TrustManager; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. + */ +@AutoValue +@Immutable +public abstract class HttpSenderConfig { + + @SuppressWarnings("TooManyParameters") + public static HttpSenderConfig create( + String endpoint, + @Nullable Compressor compressor, + boolean exportAsJson, + String contentType, + long timeoutNanos, + long connectTimeoutNanos, + Supplier>> headerSupplier, + @Nullable ProxyOptions proxyOptions, + @Nullable RetryPolicy retryPolicy, + @Nullable SSLContext sslContext, + @Nullable X509TrustManager trustManager, + @Nullable ExecutorService executorService) { + return new AutoValue_HttpSenderConfig( + endpoint, + compressor, + exportAsJson, + contentType, + timeoutNanos, + connectTimeoutNanos, + headerSupplier, + proxyOptions, + retryPolicy, + sslContext, + trustManager, + executorService); + } + + public abstract String getEndpoint(); + + @Nullable + public abstract Compressor getCompressor(); + + public abstract boolean getExportAsJson(); + + public abstract String getContentType(); + + public abstract long getTimeoutNanos(); + + public abstract long getConnectTimeoutNanos(); + + public abstract Supplier>> getHeadersSupplier(); + + @Nullable + public abstract ProxyOptions getProxyOptions(); + + @Nullable + public abstract RetryPolicy getRetryPolicy(); + + @Nullable + public abstract SSLContext getSslContext(); + + @Nullable + public abstract X509TrustManager getTrustManager(); + + @Nullable + public abstract ExecutorService getExecutorService(); +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSenderProvider.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSenderProvider.java index 22050e8e624..10563f5a00d 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSenderProvider.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/http/HttpSenderProvider.java @@ -5,14 +5,6 @@ package io.opentelemetry.exporter.internal.http; -import io.opentelemetry.exporter.internal.auth.Authenticator; -import io.opentelemetry.sdk.common.export.RetryPolicy; -import java.util.Map; -import java.util.function.Supplier; -import javax.annotation.Nullable; -import javax.net.ssl.SSLContext; -import 
javax.net.ssl.X509TrustManager; - /** * A service provider interface (SPI) for providing {@link HttpSender}s backed by different HTTP * client libraries. @@ -22,15 +14,6 @@ */ public interface HttpSenderProvider { - /** Returns a {@link HttpSender} configured with the provided parameters. */ - HttpSender createSender( - String endpoint, - boolean compressionEnabled, - String contentType, - long timeoutNanos, - Supplier> headerSupplier, - @Nullable Authenticator authenticator, - @Nullable RetryPolicy retryPolicy, - @Nullable SSLContext sslContext, - @Nullable X509TrustManager trustManager); + /** Returns a {@link HttpSender} configured with the provided config. */ + HttpSender createSender(HttpSenderConfig httpSenderConfig); } diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/CodedOutputStream.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/CodedOutputStream.java index 8cc17a7834b..68311be1845 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/CodedOutputStream.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/CodedOutputStream.java @@ -44,6 +44,7 @@ import io.opentelemetry.api.internal.ConfigUtil; import java.io.IOException; import java.io.OutputStream; +import java.nio.ByteBuffer; /** * Protobuf wire encoder. @@ -56,7 +57,7 @@ // // Differences // - No support for Message/Lite -// - No support for ByteString or ByteBuffer +// - No support for ByteString // - No support for message set extensions // - No support for Unsafe // - No support for Java String, only UTF-8 bytes @@ -329,6 +330,11 @@ public static int computeByteArraySizeNoTag(final byte[] value) { return computeLengthDelimitedFieldSize(value.length); } + /** Compute the number of bytes that would be needed to encode a {@code bytes} field. */ + public static int computeByteBufferSizeNoTag(final ByteBuffer value) { + return computeLengthDelimitedFieldSize(value.capacity()); + } + static int computeLengthDelimitedFieldSize(int fieldLength) { return computeUInt32SizeNoTag(fieldLength) + fieldLength; } @@ -375,6 +381,8 @@ static long encodeZigZag64(final long n) { abstract void writeByteArrayNoTag(final byte[] value, final int offset, final int length) throws IOException; + abstract void writeByteBufferNoTag(final ByteBuffer value) throws IOException; + // ================================================================= /** Abstract base class for buffered encoders. */ @@ -487,6 +495,49 @@ void writeByteArrayNoTag(final byte[] value, int offset, int length) throws IOEx write(value, offset, length); } + @Override + void writeByteBufferNoTag(final ByteBuffer value) throws IOException { + writeUInt32NoTag(value.capacity()); + if (value.hasArray()) { + write(value.array(), value.arrayOffset(), value.capacity()); + } else { + write((ByteBuffer) value.duplicate().clear()); + } + } + + void write(ByteBuffer value) throws IOException { + int length = value.remaining(); + if (limit - position >= length) { + // We have room in the current buffer. + value.get(buffer, position, length); + position += length; + totalBytesWritten += length; + } else { + // Write extends past current buffer. Fill the rest of this buffer and + // flush. + final int bytesWritten = limit - position; + value.get(buffer, position, bytesWritten); + length -= bytesWritten; + position = limit; + totalBytesWritten += bytesWritten; + doFlush(); + + // Now deal with the rest. 
+ // Since we have an output stream, this is our buffer + // and buffer offset == 0 + while (length > limit) { + // Copy data into the buffer before writing it to OutputStream. + value.get(buffer, 0, limit); + out.write(buffer, 0, limit); + length -= limit; + totalBytesWritten += limit; + } + value.get(buffer, 0, length); + position = length; + totalBytesWritten += length; + } + } + @Override void write(byte value) throws IOException { if (position == limit) { diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/JsonSerializer.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/JsonSerializer.java index ec805fc1fbf..6170e1925d2 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/JsonSerializer.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/JsonSerializer.java @@ -9,6 +9,7 @@ import com.fasterxml.jackson.core.JsonGenerator; import java.io.IOException; import java.io.OutputStream; +import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.List; @@ -81,6 +82,11 @@ protected void writeUInt64Value(long value) throws IOException { generator.writeString(Long.toString(value)); } + @Override + public void writeUInt64(ProtoFieldInfo field, long value) throws IOException { + generator.writeStringField(field.getJsonName(), Long.toString(value)); + } + @Override protected void writeFixed32(ProtoFieldInfo field, int value) throws IOException { generator.writeNumberField(field.getJsonName(), value); @@ -109,10 +115,41 @@ public void writeString(ProtoFieldInfo field, byte[] utf8Bytes) throws IOExcepti } @Override - protected void writeBytes(ProtoFieldInfo field, byte[] value) throws IOException { + public void writeString( + ProtoFieldInfo field, String string, int utf8Length, MarshalerContext context) + throws IOException { + generator.writeFieldName(field.getJsonName()); + generator.writeString(string); + } + + @Override + public void writeRepeatedString(ProtoFieldInfo field, byte[][] utf8Bytes) throws IOException { + generator.writeArrayFieldStart(field.getJsonName()); + for (byte[] value : utf8Bytes) { + // Marshalers encoded String into UTF-8 bytes to optimize for binary serialization where + // we are able to avoid the encoding process happening twice, one for size computation and one + // for actual writing. JsonGenerator actually has a writeUTF8String that would be able to + // accept + // this, but it only works when writing to an OutputStream, but not to a String like we do for + // writing to logs. It's wasteful to take a String, convert it to bytes, and convert back to + // the same String but we can see if this can be improved in the future. 
+ generator.writeString(new String(value, StandardCharsets.UTF_8)); + } + generator.writeEndArray(); + } + + @Override + public void writeBytes(ProtoFieldInfo field, byte[] value) throws IOException { generator.writeBinaryField(field.getJsonName(), value); } + @Override + public void writeByteBuffer(ProtoFieldInfo field, ByteBuffer value) throws IOException { + byte[] data = new byte[value.capacity()]; + ((ByteBuffer) value.duplicate().clear()).get(data); + generator.writeBinaryField(field.getJsonName(), data); + } + @Override protected void writeStartMessage(ProtoFieldInfo field, int protoMessageSize) throws IOException { generator.writeObjectFieldStart(field.getJsonName()); @@ -165,6 +202,44 @@ public void serializeRepeatedMessage( generator.writeEndArray(); } + @Override + public void serializeRepeatedMessageWithContext( + ProtoFieldInfo field, + List messages, + StatelessMarshaler marshaler, + MarshalerContext context) + throws IOException { + generator.writeArrayFieldStart(field.getJsonName()); + for (int i = 0; i < messages.size(); i++) { + T message = messages.get(i); + generator.writeStartObject(); + marshaler.writeTo(this, message, context); + generator.writeEndObject(); + } + generator.writeEndArray(); + } + + @Override + protected void writeStartRepeated(ProtoFieldInfo field) throws IOException { + generator.writeArrayFieldStart(field.getJsonName()); + } + + @Override + protected void writeEndRepeated() throws IOException { + generator.writeEndArray(); + } + + @Override + protected void writeStartRepeatedElement(ProtoFieldInfo field, int protoMessageSize) + throws IOException { + generator.writeStartObject(); + } + + @Override + protected void writeEndRepeatedElement() throws IOException { + generator.writeEndObject(); + } + // Not a field. void writeMessageValue(Marshaler message) throws IOException { generator.writeStartObject(); diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/Marshaler.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/Marshaler.java index 2e6fba4644c..e942673da13 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/Marshaler.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/Marshaler.java @@ -32,12 +32,23 @@ public final void writeJsonTo(OutputStream output) throws IOException { } /** Marshals into the {@link JsonGenerator} in proto JSON format. */ - public final void writeJsonTo(JsonGenerator output) throws IOException { + // Intentionally not overloading writeJsonTo(OutputStream) in order to avoid compilation + // dependency on jackson when using writeJsonTo(OutputStream). See: + // https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1551#discussion_r1849064365 + public final void writeJsonToGenerator(JsonGenerator output) throws IOException { try (JsonSerializer serializer = new JsonSerializer(output)) { serializer.writeMessageValue(this); } } + /** Marshals into the {@link JsonGenerator} in proto JSON format and adds a newline. */ + public final void writeJsonWithNewline(JsonGenerator output) throws IOException { + try (JsonSerializer serializer = new JsonSerializer(output)) { + serializer.writeMessageValue(this); + output.writeRaw('\n'); + } + } + /** Returns the number of bytes this Marshaler will write in proto binary format. 
*/ public abstract int getBinarySerializedSize(); diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/MarshalerContext.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/MarshalerContext.java new file mode 100644 index 00000000000..80d01e84392 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/MarshalerContext.java @@ -0,0 +1,238 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.marshal; + +import io.opentelemetry.api.trace.SpanId; +import io.opentelemetry.api.trace.TraceId; +import java.util.ArrayList; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Consumer; +import java.util.function.Supplier; +import javax.annotation.Nullable; + +/** + * Class for keeping marshaling state. The state consists of integers, that we call sizes, and + * objects, that we call data. Both integers and objects can be read from the state in the order + * they were added (first in, first out). Additionally, this class provides various pools and caches + * for objects that can be reused between marshalling attempts. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class MarshalerContext { + private final boolean marshalStringNoAllocation; + private final boolean marshalStringUnsafe; + + private int[] sizes = new int[16]; + private int sizeReadIndex; + private int sizeWriteIndex; + private Object[] data = new Object[16]; + private int dataReadIndex; + private int dataWriteIndex; + + public MarshalerContext() { + this(/* marshalStringNoAllocation= */ true, /* marshalStringUnsafe= */ true); + } + + public MarshalerContext(boolean marshalStringNoAllocation, boolean marshalStringUnsafe) { + this.marshalStringNoAllocation = marshalStringNoAllocation; + this.marshalStringUnsafe = marshalStringUnsafe; + } + + public boolean marshalStringNoAllocation() { + return marshalStringNoAllocation; + } + + public boolean marshalStringUnsafe() { + return marshalStringUnsafe; + } + + public void addSize(int size) { + growSizeIfNeeded(); + sizes[sizeWriteIndex++] = size; + } + + public int addSize() { + growSizeIfNeeded(); + return sizeWriteIndex++; + } + + private void growSizeIfNeeded() { + if (sizeWriteIndex == sizes.length) { + int[] newSizes = new int[sizes.length * 2]; + System.arraycopy(sizes, 0, newSizes, 0, sizes.length); + sizes = newSizes; + } + } + + public void setSize(int index, int size) { + sizes[index] = size; + } + + public int getSize() { + return sizes[sizeReadIndex++]; + } + + public void addData(@Nullable Object o) { + growDataIfNeeded(); + data[dataWriteIndex++] = o; + } + + private void growDataIfNeeded() { + if (dataWriteIndex == data.length) { + Object[] newData = new Object[data.length * 2]; + System.arraycopy(data, 0, newData, 0, data.length); + data = newData; + } + } + + public T getData(Class type) { + return type.cast(data[dataReadIndex++]); + } + + private final IdPool traceIdPool = new IdPool(TraceId.getLength() / 2); + + /** Returns a buffer that can be used to hold a trace id. */ + public byte[] getTraceIdBuffer() { + return traceIdPool.get(); + } + + private final IdPool spanIdPool = new IdPool(SpanId.getLength() / 2); + + /** Returns a buffer that can be used to hold a span id. */ + public byte[] getSpanIdBuffer() { + return spanIdPool.get(); + } + + private static class IdPool { + private final List pool = new ArrayList<>(); + int index; + final int idSize; + + IdPool(int idSize) { + this.idSize = idSize; + } + + byte[] get() { + if (index < pool.size()) { + return pool.get(index++); + } + byte[] result = new byte[idSize]; + pool.add(result); + index++; + + return result; + } + + void reset() { + index = 0; + } + } + + private final Pool> mapPool = new Pool<>(IdentityHashMap::new, Map::clear); + + /** Returns a pooled identity map. */ + @SuppressWarnings("unchecked") + public Map getIdentityMap() { + return (Map) mapPool.get(); + } + + private final Pool> listPool = new Pool<>(ArrayList::new, List::clear); + + /** Returns a pooled list. 
*/ + @SuppressWarnings("unchecked") + public List getList() { + return (List) listPool.get(); + } + + private static class Pool { + private final List pool = new ArrayList<>(); + private int index; + private final Supplier factory; + private final Consumer clean; + + Pool(Supplier factory, Consumer clean) { + this.factory = factory; + this.clean = clean; + } + + T get() { + if (index < pool.size()) { + return pool.get(index++); + } + T result = factory.get(); + pool.add(result); + index++; + + return result; + } + + void reset() { + for (int i = 0; i < index; i++) { + clean.accept(pool.get(i)); + } + index = 0; + } + } + + /** Reset context so that serialization could be re-run. */ + public void resetReadIndex() { + sizeReadIndex = 0; + dataReadIndex = 0; + } + + /** Reset context so that it could be reused. */ + public void reset() { + sizeReadIndex = 0; + sizeWriteIndex = 0; + for (int i = 0; i < dataWriteIndex; i++) { + data[i] = null; + } + dataReadIndex = 0; + dataWriteIndex = 0; + + traceIdPool.reset(); + spanIdPool.reset(); + + mapPool.reset(); + listPool.reset(); + } + + private static final AtomicInteger KEY_INDEX = new AtomicInteger(); + + /** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. + */ + public static class Key { + final int index = KEY_INDEX.getAndIncrement(); + } + + public static Key key() { + return new Key(); + } + + private Object[] instances = new Object[16]; + + @SuppressWarnings("unchecked") + public T getInstance(Key key, Supplier supplier) { + if (key.index >= instances.length) { + Object[] newData = new Object[instances.length * 2]; + System.arraycopy(instances, 0, newData, 0, instances.length); + instances = newData; + } + + T result = (T) instances[key.index]; + if (result == null) { + result = supplier.get(); + instances[key.index] = result; + } + return result; + } +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/MarshalerUtil.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/MarshalerUtil.java index e0f4c6f25f7..78e8bf71be1 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/MarshalerUtil.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/MarshalerUtil.java @@ -8,10 +8,12 @@ import io.opentelemetry.api.trace.SpanId; import io.opentelemetry.api.trace.TraceId; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; import io.opentelemetry.sdk.resources.Resource; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.UncheckedIOException; +import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; @@ -108,6 +110,16 @@ private static int sizeRepeatedFixed64(ProtoFieldInfo field, int numValues) { return size; } + /** Returns the size of a repeated string field. */ + @SuppressWarnings("AvoidObjectArrays") + public static int sizeRepeatedString(ProtoFieldInfo field, byte[][] utf8Bytes) { + int size = 0; + for (byte[] i : utf8Bytes) { + size += MarshalerUtil.sizeBytes(field, i); + } + return size; + } + /** * Returns the size of a repeated uint64 field. 
* @@ -128,6 +140,111 @@ public static int sizeRepeatedUInt64(ProtoFieldInfo field, long[] values) { return field.getTagSize() + CodedOutputStream.computeUInt32SizeNoTag(payloadSize) + payloadSize; } + /** + * Returns the size of a repeated uint64 field. + * + *
<p>
Packed repeated fields contain the tag, an integer representing the incoming payload size, + * and an actual payload of repeated varints. + */ + public static int sizeRepeatedUInt64(ProtoFieldInfo field, List values) { + if (values.isEmpty()) { + return 0; + } + + int payloadSize = 0; + for (long v : values) { + payloadSize += CodedOutputStream.computeUInt64SizeNoTag(v); + } + + // tag size + payload indicator size + actual payload size + return field.getTagSize() + CodedOutputStream.computeUInt32SizeNoTag(payloadSize) + payloadSize; + } + + /** + * Returns the size of a repeated uint64 field. + * + *
<p>
Packed repeated fields contain the tag, an integer representing the incoming payload size, + * and an actual payload of repeated varints. + * + *
<p>
NOTE: This method has the same logic as {@link #sizeRepeatedUInt64(ProtoFieldInfo, long[])} + * )} but instead of using a primitive array it uses {@link DynamicPrimitiveLongList} to avoid + * boxing/unboxing + */ + public static int sizeRepeatedUInt64(ProtoFieldInfo field, DynamicPrimitiveLongList values) { + if (values.isEmpty()) { + return 0; + } + + int payloadSize = 0; + for (int i = 0; i < values.size(); i++) { + long v = values.getLong(i); + payloadSize += CodedOutputStream.computeUInt64SizeNoTag(v); + } + + // tag size + payload indicator size + actual payload size + return field.getTagSize() + CodedOutputStream.computeUInt32SizeNoTag(payloadSize) + payloadSize; + } + + /** + * Returns the size of a repeated int64 field. + * + *
<p>
Packed repeated fields contain the tag, an integer representing the incoming payload size, + * and an actual payload of repeated varints. + */ + public static int sizeRepeatedInt64(ProtoFieldInfo field, long[] values) { + if (values.length == 0) { + return 0; + } + + int payloadSize = 0; + for (long v : values) { + payloadSize += CodedOutputStream.computeInt64SizeNoTag(v); + } + + // tag size + payload indicator size + actual payload size + return field.getTagSize() + CodedOutputStream.computeUInt32SizeNoTag(payloadSize) + payloadSize; + } + + /** + * Returns the size of a repeated int64 field. + * + *
<p>
Packed repeated fields contain the tag, an integer representing the incoming payload size, + * and an actual payload of repeated varints. + */ + public static int sizeRepeatedInt64(ProtoFieldInfo field, List values) { + if (values.isEmpty()) { + return 0; + } + + int payloadSize = 0; + for (long v : values) { + payloadSize += CodedOutputStream.computeInt64SizeNoTag(v); + } + + // tag size + payload indicator size + actual payload size + return field.getTagSize() + CodedOutputStream.computeUInt32SizeNoTag(payloadSize) + payloadSize; + } + + /** + * Returns the size of a repeated int32 field. + * + *
<p>
Packed repeated fields contain the tag, an integer representing the incoming payload size, + * and an actual payload of repeated varints. + */ + public static int sizeRepeatedInt32(ProtoFieldInfo field, List values) { + if (values.isEmpty()) { + return 0; + } + + int payloadSize = 0; + for (int v : values) { + payloadSize += CodedOutputStream.computeInt32SizeNoTag(v); + } + + // tag size + payload indicator size + actual payload size + return field.getTagSize() + CodedOutputStream.computeUInt32SizeNoTag(payloadSize) + payloadSize; + } + /** Returns the size of a repeated double field. */ public static int sizeRepeatedDouble(ProtoFieldInfo field, List values) { // Same as fixed64. @@ -181,6 +298,14 @@ public static int sizeInt64(ProtoFieldInfo field, long message) { return field.getTagSize() + CodedOutputStream.computeInt64SizeNoTag(message); } + /** Returns the size of a uint64 field. */ + public static int sizeUInt64(ProtoFieldInfo field, long message) { + if (message == 0) { + return 0; + } + return field.getTagSize() + CodedOutputStream.computeUInt64SizeNoTag(message); + } + /** Returns the size of a uint32 field. */ public static int sizeUInt32(ProtoFieldInfo field, int message) { if (message == 0) { @@ -205,6 +330,19 @@ public static int sizeInt32(ProtoFieldInfo field, int message) { return field.getTagSize() + CodedOutputStream.computeInt32SizeNoTag(message); } + /** Returns the size of an optional int32 field. */ + public static int sizeInt32Optional(ProtoFieldInfo field, int message) { + return field.getTagSize() + CodedOutputStream.computeInt32SizeNoTag(message); + } + + /** Returns the size of an optional int32 field. */ + public static int sizeInt32Optional(ProtoFieldInfo field, @Nullable Integer message) { + if (message == null) { + return 0; + } + return sizeInt32Optional(field, (int) message); + } + /** Returns the size of a double field. */ public static int sizeDouble(ProtoFieldInfo field, double value) { if (value == 0D) { @@ -231,6 +369,11 @@ public static int sizeFixed64Optional(ProtoFieldInfo field, long value) { return field.getTagSize() + CodedOutputStream.computeFixed64SizeNoTag(value); } + /** Returns the size of a byte field when propagated to a fixed32. */ + public static int sizeByteAsFixed32(ProtoFieldInfo field, byte message) { + return sizeFixed32(field, ((int) message) & 0xff); + } + /** Returns the size of a fixed32 field. */ public static int sizeFixed32(ProtoFieldInfo field, int message) { if (message == 0L) { @@ -247,6 +390,14 @@ public static int sizeBytes(ProtoFieldInfo field, byte[] message) { return field.getTagSize() + CodedOutputStream.computeByteArraySizeNoTag(message); } + /** Returns the size of a bytes field based on the buffer's capacity. */ + public static int sizeByteBuffer(ProtoFieldInfo field, ByteBuffer message) { + if (message.capacity() == 0) { + return 0; + } + return field.getTagSize() + CodedOutputStream.computeByteBufferSizeNoTag(message); + } + /** Returns the size of a enum field. */ // Assumes OTLP always defines the first item in an enum with number 0, which it does and will. 
public static int sizeEnum(ProtoFieldInfo field, ProtoEnumInfo enumValue) { diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/ProtoSerializer.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/ProtoSerializer.java index f34c5690edd..694cec8b2b9 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/ProtoSerializer.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/ProtoSerializer.java @@ -10,6 +10,7 @@ import io.opentelemetry.api.trace.TraceId; import java.io.IOException; import java.io.OutputStream; +import java.nio.ByteBuffer; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -40,6 +41,18 @@ protected void writeTraceId(ProtoFieldInfo field, String traceId) throws IOExcep writeBytes(field, traceIdBytes); } + @Override + protected void writeTraceId(ProtoFieldInfo field, String traceId, MarshalerContext context) + throws IOException { + byte[] traceIdBytes = idCache.get(traceId); + if (traceIdBytes == null) { + traceIdBytes = context.getTraceIdBuffer(); + OtelEncodingUtils.bytesFromBase16(traceId, TraceId.getLength(), traceIdBytes); + idCache.put(traceId, traceIdBytes); + } + writeBytes(field, traceIdBytes); + } + @Override protected void writeSpanId(ProtoFieldInfo field, String spanId) throws IOException { byte[] spanIdBytes = @@ -48,6 +61,18 @@ protected void writeSpanId(ProtoFieldInfo field, String spanId) throws IOExcepti writeBytes(field, spanIdBytes); } + @Override + protected void writeSpanId(ProtoFieldInfo field, String spanId, MarshalerContext context) + throws IOException { + byte[] spanIdBytes = idCache.get(spanId); + if (spanIdBytes == null) { + spanIdBytes = context.getSpanIdBuffer(); + OtelEncodingUtils.bytesFromBase16(spanId, SpanId.getLength(), spanIdBytes); + idCache.put(spanId, spanIdBytes); + } + writeBytes(field, spanIdBytes); + } + @Override public void writeBool(ProtoFieldInfo field, boolean value) throws IOException { output.writeUInt32NoTag(field.getTag()); @@ -84,6 +109,12 @@ public void writeInt64(ProtoFieldInfo field, long value) throws IOException { output.writeInt64NoTag(value); } + @Override + public void writeUInt64(ProtoFieldInfo field, long value) throws IOException { + output.writeUInt32NoTag(field.getTag()); + output.writeUInt64NoTag(value); + } + @Override protected void writeFixed64(ProtoFieldInfo field, long value) throws IOException { output.writeUInt32NoTag(field.getTag()); @@ -123,11 +154,34 @@ public void writeString(ProtoFieldInfo field, byte[] utf8Bytes) throws IOExcepti } @Override - protected void writeBytes(ProtoFieldInfo field, byte[] value) throws IOException { + public void writeString( + ProtoFieldInfo field, String string, int utf8Length, MarshalerContext context) + throws IOException { + output.writeUInt32NoTag(field.getTag()); + output.writeUInt32NoTag(utf8Length); + + StatelessMarshalerUtil.writeUtf8(output, string, utf8Length, context); + } + + @Override + public void writeRepeatedString(ProtoFieldInfo field, byte[][] utf8Bytes) throws IOException { + for (byte[] value : utf8Bytes) { + writeString(field, value); + } + } + + @Override + public void writeBytes(ProtoFieldInfo field, byte[] value) throws IOException { output.writeUInt32NoTag(field.getTag()); output.writeByteArrayNoTag(value); } + @Override + public void writeByteBuffer(ProtoFieldInfo field, ByteBuffer value) throws IOException { + output.writeUInt32NoTag(field.getTag()); + output.writeByteBufferNoTag(value); + } + 
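The `duplicate().clear()` idiom used by the ByteBuffer-writing methods above reads a buffer's full capacity without disturbing the caller's position or limit. A minimal standalone sketch of that idiom follows; the class and method names are illustrative only and are not part of the exporter API.

```java
import java.nio.ByteBuffer;

public class ByteBufferCopySketch {

  // Copies the buffer's entire capacity into a byte[] while leaving the original
  // buffer's position and limit untouched, since only the duplicate's cursor moves.
  static byte[] copyAll(ByteBuffer value) {
    byte[] data = new byte[value.capacity()];
    ((ByteBuffer) value.duplicate().clear()).get(data);
    return data;
  }

  public static void main(String[] args) {
    ByteBuffer buffer = ByteBuffer.allocate(8);
    buffer.putInt(42); // position advances to 4
    byte[] copy = copyAll(buffer);
    // Prints: copied 8 bytes, caller position still 4
    System.out.println("copied " + copy.length + " bytes, caller position still " + buffer.position());
  }
}
```

Because only the duplicate's cursor is advanced, the same buffer can be serialized again (for example on a retried export) without the caller having to rewind it.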
@Override protected void writeStartMessage(ProtoFieldInfo field, int protoMessageSize) throws IOException { output.writeUInt32NoTag(field.getTag()); @@ -179,6 +233,42 @@ public void serializeRepeatedMessage( } } + @Override + public void serializeRepeatedMessageWithContext( + ProtoFieldInfo field, + List messages, + StatelessMarshaler marshaler, + MarshalerContext context) + throws IOException { + for (int i = 0; i < messages.size(); i++) { + T message = messages.get(i); + writeStartMessage(field, context.getSize()); + marshaler.writeTo(this, message, context); + writeEndMessage(); + } + } + + @Override + protected void writeStartRepeated(ProtoFieldInfo field) { + // Do nothing + } + + @Override + protected void writeEndRepeated() { + // Do nothing + } + + @Override + protected void writeStartRepeatedElement(ProtoFieldInfo field, int protoMessageSize) + throws IOException { + writeStartMessage(field, protoMessageSize); + } + + @Override + protected void writeEndRepeatedElement() { + writeEndMessage(); + } + @Override public void writeSerializedMessage(byte[] protoSerialized, String jsonSerialized) throws IOException { diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/Serializer.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/Serializer.java index e3f36e6f98a..100506fb3dc 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/Serializer.java +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/Serializer.java @@ -5,8 +5,17 @@ package io.opentelemetry.exporter.internal.marshal; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.ByteBuffer; +import java.util.Collection; import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Consumer; import javax.annotation.Nullable; /** @@ -22,6 +31,7 @@ * at any time. */ public abstract class Serializer implements AutoCloseable { + private static final MarshalerContext.Key ATTRIBUTES_WRITER_KEY = MarshalerContext.key(); Serializer() {} @@ -33,8 +43,21 @@ public void serializeTraceId(ProtoFieldInfo field, @Nullable String traceId) thr writeTraceId(field, traceId); } + public void serializeTraceId( + ProtoFieldInfo field, @Nullable String traceId, MarshalerContext context) throws IOException { + if (traceId == null) { + return; + } + writeTraceId(field, traceId, context); + } + protected abstract void writeTraceId(ProtoFieldInfo field, String traceId) throws IOException; + protected void writeTraceId(ProtoFieldInfo field, String traceId, MarshalerContext context) + throws IOException { + writeTraceId(field, traceId); + } + /** Serializes a span ID field. 
*/ public void serializeSpanId(ProtoFieldInfo field, @Nullable String spanId) throws IOException { if (spanId == null) { @@ -43,8 +66,21 @@ public void serializeSpanId(ProtoFieldInfo field, @Nullable String spanId) throw writeSpanId(field, spanId); } + public void serializeSpanId( + ProtoFieldInfo field, @Nullable String spanId, MarshalerContext context) throws IOException { + if (spanId == null) { + return; + } + writeSpanId(field, spanId, context); + } + protected abstract void writeSpanId(ProtoFieldInfo field, String spanId) throws IOException; + protected void writeSpanId(ProtoFieldInfo field, String spanId, MarshalerContext context) + throws IOException { + writeSpanId(field, spanId); + } + /** Serializes a protobuf {@code bool} field. */ public void serializeBool(ProtoFieldInfo field, boolean value) throws IOException { if (!value) { @@ -87,7 +123,7 @@ public void serializeSInt32(ProtoFieldInfo field, int value) throws IOException protected abstract void writeSInt32(ProtoFieldInfo info, int value) throws IOException; - /** Serializes a protobuf {@code uint32} field. */ + /** Serializes a protobuf {@code int32} field. */ public void serializeInt32(ProtoFieldInfo field, int value) throws IOException { if (value == 0) { return; @@ -95,6 +131,19 @@ public void serializeInt32(ProtoFieldInfo field, int value) throws IOException { writeint32(field, value); } + /** Serializes a protobuf {@code int32} field. */ + public void serializeInt32Optional(ProtoFieldInfo field, int value) throws IOException { + writeint32(field, value); + } + + /** Serializes a protobuf {@code int32} field. */ + public void serializeInt32Optional(ProtoFieldInfo field, @Nullable Integer value) + throws IOException { + if (value != null) { + serializeInt32Optional(field, (int) value); + } + } + protected abstract void writeint32(ProtoFieldInfo field, int value) throws IOException; /** Serializes a protobuf {@code int64} field. */ @@ -105,9 +154,20 @@ public void serializeInt64(ProtoFieldInfo field, long value) throws IOException writeInt64(field, value); } + /** Serializes a protobuf {@code uint64} field. */ + public void serializeUInt64(ProtoFieldInfo field, long value) throws IOException { + if (value == 0) { + return; + } + writeUInt64(field, value); + } + /** Writes a protobuf {@code int64} field, even if it matches the default value. */ public abstract void writeInt64(ProtoFieldInfo field, long value) throws IOException; + /** Writes a protobuf {@code uint64} field, even if it matches the default value. */ + public abstract void writeUInt64(ProtoFieldInfo field, long value) throws IOException; + /** Serializes a protobuf {@code fixed64} field. */ public void serializeFixed64(ProtoFieldInfo field, long value) throws IOException { if (value == 0) { @@ -127,6 +187,14 @@ public void serializeFixed64Optional(ProtoFieldInfo field, long value) throws IO protected abstract void writeUInt64Value(long value) throws IOException; + /** + * Serializes a byte as a protobuf {@code fixed32} field. Ensures that there is no sign + * propagation if the high bit in the byte is set. + */ + public void serializeByteAsFixed32(ProtoFieldInfo field, byte value) throws IOException { + serializeFixed32(field, ((int) value) & 0xff); + } + /** Serializes a protobuf {@code fixed32} field. 
*/ public void serializeFixed32(ProtoFieldInfo field, int value) throws IOException { if (value == 0) { @@ -166,9 +234,49 @@ public void serializeString(ProtoFieldInfo field, byte[] utf8Bytes) throws IOExc writeString(field, utf8Bytes); } + /** + * Serializes a protobuf {@code repeated string} field. {@code utf8Bytes} is the UTF8 encoded + * bytes of the strings to serialize. + */ + @SuppressWarnings("AvoidObjectArrays") + public void serializeRepeatedString(ProtoFieldInfo field, byte[][] utf8Bytes) throws IOException { + if (utf8Bytes.length == 0) { + return; + } + writeRepeatedString(field, utf8Bytes); + } + + /** + * Serializes a protobuf {@code string} field. {@code string} is the value to be serialized and + * {@code utf8Length} is the length of the string after it is encoded in UTF8. This method reads + * elements from context, use together with {@link + * StatelessMarshalerUtil#sizeStringWithContext(ProtoFieldInfo, String, MarshalerContext)}. + */ + public void serializeStringWithContext( + ProtoFieldInfo field, @Nullable String string, MarshalerContext context) throws IOException { + if (string == null || string.isEmpty()) { + return; + } + if (context.marshalStringNoAllocation()) { + writeString(field, string, context.getSize(), context); + } else { + byte[] valueUtf8 = context.getData(byte[].class); + writeString(field, valueUtf8); + } + } + /** Writes a protobuf {@code string} field, even if it matches the default value. */ public abstract void writeString(ProtoFieldInfo field, byte[] utf8Bytes) throws IOException; + public abstract void writeString( + ProtoFieldInfo field, String string, int utf8Length, MarshalerContext context) + throws IOException; + + /** Writes a protobuf {@code repeated string} field, even if it matches the default value. */ + @SuppressWarnings("AvoidObjectArrays") + public abstract void writeRepeatedString(ProtoFieldInfo field, byte[][] utf8Bytes) + throws IOException; + /** Serializes a protobuf {@code bytes} field. */ public void serializeBytes(ProtoFieldInfo field, byte[] value) throws IOException { if (value.length == 0) { @@ -177,7 +285,21 @@ public void serializeBytes(ProtoFieldInfo field, byte[] value) throws IOExceptio writeBytes(field, value); } - protected abstract void writeBytes(ProtoFieldInfo field, byte[] value) throws IOException; + /** + * Serializes a protobuf {@code bytes} field. Writes all content of the ByteBuffer regardless of + * the current position and limit. Does not alter the position or limit of the provided + * ByteBuffer. + */ + public void serializeByteBuffer(ProtoFieldInfo field, ByteBuffer value) throws IOException { + if (value.capacity() == 0) { + return; + } + writeByteBuffer(field, value); + } + + public abstract void writeBytes(ProtoFieldInfo field, byte[] value) throws IOException; + + public abstract void writeByteBuffer(ProtoFieldInfo field, ByteBuffer value) throws IOException; protected abstract void writeStartMessage(ProtoFieldInfo field, int protoMessageSize) throws IOException; @@ -191,6 +313,37 @@ public void serializeMessage(ProtoFieldInfo field, Marshaler message) throws IOE writeEndMessage(); } + /** + * Serializes a protobuf embedded {@code message}. This method adds elements to context, use + * together with {@link StatelessMarshalerUtil#sizeMessageWithContext(ProtoFieldInfo, Object, + * StatelessMarshaler, MarshalerContext)}. 
+ */ + public void serializeMessageWithContext( + ProtoFieldInfo field, T message, StatelessMarshaler marshaler, MarshalerContext context) + throws IOException { + writeStartMessage(field, context.getSize()); + marshaler.writeTo(this, message, context); + writeEndMessage(); + } + + /** + * Serializes a protobuf embedded {@code message}. This method adds elements to context, use + * together with {@link StatelessMarshalerUtil#sizeMessageWithContext(ProtoFieldInfo, Object, + * Object, StatelessMarshaler2, MarshalerContext)}. + */ + public void serializeMessageWithContext( + ProtoFieldInfo field, + K key, + V value, + StatelessMarshaler2 marshaler, + MarshalerContext context) + throws IOException { + writeStartMessage(field, context.getSize()); + marshaler.writeTo(this, key, value, context); + writeEndMessage(); + } + + @SuppressWarnings("SameParameterValue") protected abstract void writeStartRepeatedPrimitive( ProtoFieldInfo field, int protoSizePerElement, int numElements) throws IOException; @@ -201,13 +354,33 @@ protected abstract void writeStartRepeatedVarint(ProtoFieldInfo field, int paylo protected abstract void writeEndRepeatedVarint() throws IOException; + /** Serializes a {@code repeated int32} field. */ + public void serializeRepeatedInt32(ProtoFieldInfo field, List values) + throws IOException { + if (values.isEmpty()) { + return; + } + + int payloadSize = 0; + for (int v : values) { + payloadSize += CodedOutputStream.computeInt32SizeNoTag(v); + } + + writeStartRepeatedVarint(field, payloadSize); + for (int value : values) { + writeUInt64Value(value); + } + writeEndRepeatedVarint(); + } + /** Serializes a {@code repeated fixed64} field. */ public void serializeRepeatedFixed64(ProtoFieldInfo field, List values) throws IOException { if (values.isEmpty()) { return; } writeStartRepeatedPrimitive(field, WireFormat.FIXED64_SIZE, values.size()); - for (long value : values) { + for (int i = 0; i < values.size(); i++) { + Long value = values.get(i); writeFixed64Value(value); } writeEndRepeatedPrimitive(); @@ -243,6 +416,68 @@ public void serializeRepeatedUInt64(ProtoFieldInfo field, long[] values) throws writeEndRepeatedVarint(); } + /** Serializes a {@code repeated uint64} field. */ + public void serializeRepeatedUInt64(ProtoFieldInfo field, List values) throws IOException { + if (values.isEmpty()) { + return; + } + + int payloadSize = 0; + for (long v : values) { + payloadSize += CodedOutputStream.computeUInt64SizeNoTag(v); + } + + writeStartRepeatedVarint(field, payloadSize); + for (long value : values) { + writeUInt64Value(value); + } + writeEndRepeatedVarint(); + } + + /** + * Serializes a {@code repeated uint64} field. + * + *
<p>
NOTE: This is the same as {@link #serializeRepeatedUInt64(ProtoFieldInfo, long[])} but + * instead of taking a primitive array it takes a {@link DynamicPrimitiveLongList} as input. + */ + public void serializeRepeatedUInt64(ProtoFieldInfo field, DynamicPrimitiveLongList values) + throws IOException { + if (values.isEmpty()) { + return; + } + + int payloadSize = 0; + for (int i = 0; i < values.size(); i++) { + long v = values.getLong(i); + payloadSize += CodedOutputStream.computeUInt64SizeNoTag(v); + } + + writeStartRepeatedVarint(field, payloadSize); + for (int i = 0; i < values.size(); i++) { + long value = values.getLong(i); + writeUInt64Value(value); + } + writeEndRepeatedVarint(); + } + + /** Serializes a {@code repeated int64} field. */ + public void serializeRepeatedInt64(ProtoFieldInfo field, List values) throws IOException { + if (values.isEmpty()) { + return; + } + + int payloadSize = 0; + for (long v : values) { + payloadSize += CodedOutputStream.computeInt64SizeNoTag(v); + } + + writeStartRepeatedVarint(field, payloadSize); + for (long value : values) { + writeUInt64Value(value); + } + writeEndRepeatedVarint(); + } + /** Serializes a {@code repeated double} field. */ public void serializeRepeatedDouble(ProtoFieldInfo field, List values) throws IOException { @@ -250,7 +485,8 @@ public void serializeRepeatedDouble(ProtoFieldInfo field, List values) return; } writeStartRepeatedPrimitive(field, WireFormat.FIXED64_SIZE, values.size()); - for (double value : values) { + for (int i = 0; i < values.size(); i++) { + Double value = values.get(i); writeDoubleValue(value); } writeEndRepeatedPrimitive(); @@ -265,6 +501,191 @@ public abstract void serializeRepeatedMessage(ProtoFieldInfo field, Marshaler[] public abstract void serializeRepeatedMessage( ProtoFieldInfo field, List repeatedMessage) throws IOException; + /** + * Serializes {@code repeated message} field. This method reads elements from context, use + * together with {@link StatelessMarshalerUtil#sizeRepeatedMessageWithContext(ProtoFieldInfo, + * List, StatelessMarshaler, MarshalerContext)}. + */ + public abstract void serializeRepeatedMessageWithContext( + ProtoFieldInfo field, + List messages, + StatelessMarshaler marshaler, + MarshalerContext context) + throws IOException; + + /** + * Serializes {@code repeated message} field. This method reads elements from context, use + * together with {@link StatelessMarshalerUtil#sizeRepeatedMessageWithContext(ProtoFieldInfo, + * Collection, StatelessMarshaler, MarshalerContext, MarshalerContext.Key)}. + */ + @SuppressWarnings("unchecked") + public void serializeRepeatedMessageWithContext( + ProtoFieldInfo field, + Collection messages, + StatelessMarshaler marshaler, + MarshalerContext context, + MarshalerContext.Key key) + throws IOException { + if (messages instanceof List) { + serializeRepeatedMessageWithContext(field, (List) messages, marshaler, context); + return; + } + + writeStartRepeated(field); + + if (!messages.isEmpty()) { + RepeatedElementWriter writer = context.getInstance(key, RepeatedElementWriter::new); + writer.initialize(field, this, marshaler, context); + try { + messages.forEach(writer); + } catch (UncheckedIOException e) { + throw e.getCause(); + } + } + + writeEndRepeated(); + } + + /** + * Serializes {@code repeated message} field. This method reads elements from context, use + * together with {@link StatelessMarshalerUtil#sizeRepeatedMessageWithContext(ProtoFieldInfo, Map, + * StatelessMarshaler2, MarshalerContext, MarshalerContext.Key)}. 
+ */ + public void serializeRepeatedMessageWithContext( + ProtoFieldInfo field, + Map messages, + StatelessMarshaler2 marshaler, + MarshalerContext context, + MarshalerContext.Key key) + throws IOException { + writeStartRepeated(field); + + if (!messages.isEmpty()) { + RepeatedElementPairWriter writer = + context.getInstance(key, RepeatedElementPairWriter::new); + writer.initialize(field, this, marshaler, context); + try { + messages.forEach(writer); + } catch (UncheckedIOException e) { + throw e.getCause(); + } + } + + writeEndRepeated(); + } + + /** + * Serializes {@code repeated message} field. This method reads elements from context, use + * together with {@link StatelessMarshalerUtil#sizeRepeatedMessageWithContext(ProtoFieldInfo, + * Attributes, StatelessMarshaler2, MarshalerContext)}. + */ + public void serializeRepeatedMessageWithContext( + ProtoFieldInfo field, + Attributes attributes, + StatelessMarshaler2, Object> marshaler, + MarshalerContext context) + throws IOException { + writeStartRepeated(field); + + if (!attributes.isEmpty()) { + RepeatedElementPairWriter, Object> writer = + context.getInstance(ATTRIBUTES_WRITER_KEY, RepeatedElementPairWriter::new); + writer.initialize(field, this, marshaler, context); + try { + attributes.forEach(writer); + } catch (UncheckedIOException e) { + throw e.getCause(); + } + } + + writeEndRepeated(); + } + + private static class RepeatedElementWriter implements Consumer { + @SuppressWarnings("NullAway") + private ProtoFieldInfo field; + + @SuppressWarnings("NullAway") + private Serializer output; + + @SuppressWarnings("NullAway") + private StatelessMarshaler marshaler; + + @SuppressWarnings("NullAway") + private MarshalerContext context; + + void initialize( + ProtoFieldInfo field, + Serializer output, + StatelessMarshaler marshaler, + MarshalerContext context) { + this.field = field; + this.output = output; + this.marshaler = marshaler; + this.context = context; + } + + @Override + public void accept(T element) { + try { + output.writeStartRepeatedElement(field, context.getSize()); + marshaler.writeTo(output, element, context); + output.writeEndRepeatedElement(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + } + + private static class RepeatedElementPairWriter implements BiConsumer { + @SuppressWarnings("NullAway") + private ProtoFieldInfo field; + + @SuppressWarnings("NullAway") + private Serializer output; + + @SuppressWarnings("NullAway") + private StatelessMarshaler2 marshaler; + + @SuppressWarnings("NullAway") + private MarshalerContext context; + + void initialize( + ProtoFieldInfo field, + Serializer output, + StatelessMarshaler2 marshaler, + MarshalerContext context) { + this.field = field; + this.output = output; + this.marshaler = marshaler; + this.context = context; + } + + @Override + public void accept(K key, V value) { + try { + output.writeStartRepeatedElement(field, context.getSize()); + marshaler.writeTo(output, key, value, context); + output.writeEndRepeatedElement(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + } + + /** Writes start of repeated messages. */ + protected abstract void writeStartRepeated(ProtoFieldInfo field) throws IOException; + + /** Writes end of repeated messages. */ + protected abstract void writeEndRepeated() throws IOException; + + /** Writes start of a repeated message element. 
*/ + protected abstract void writeStartRepeatedElement(ProtoFieldInfo field, int protoMessageSize) + throws IOException; + + /** Writes end of a repeated message element. */ + protected abstract void writeEndRepeatedElement() throws IOException; + /** Writes the value for a message field that has been pre-serialized. */ public abstract void writeSerializedMessage(byte[] protoSerialized, String jsonSerialized) throws IOException; diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshaler.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshaler.java new file mode 100644 index 00000000000..dac3b7bbb94 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshaler.java @@ -0,0 +1,28 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.marshal; + +import java.io.IOException; + +/** + * Marshaler from an SDK structure to protobuf wire format. It is intended that the instances of + * this interface don't keep marshaling state and can be singletons. Any state needed for marshaling + * should be stored in {@link MarshalerContext}. Marshaler should be used so that first {@link + * #getBinarySerializedSize} is called and after that {@link #writeTo} is called. Calling {@link + * #getBinarySerializedSize} may add values to {@link MarshalerContext} that are later used in + * {@link #writeTo}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public interface StatelessMarshaler { + + /** Returns the number of bytes marshaling given value will write in proto binary format. */ + int getBinarySerializedSize(T value, MarshalerContext context); + + /** Marshal given value using the provided {@link Serializer}. */ + void writeTo(Serializer output, T value, MarshalerContext context) throws IOException; +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshaler2.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshaler2.java new file mode 100644 index 00000000000..c2fc8769046 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshaler2.java @@ -0,0 +1,28 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.marshal; + +import java.io.IOException; + +/** + * Marshaler from an SDK structure to protobuf wire format. It is intended that the instances of + * this interface don't keep marshaling state and can be singletons. Any state needed for marshaling + * should be stored in {@link MarshalerContext}. Marshaler should be used so that first {@link + * #getBinarySerializedSize} is called and after that {@link #writeTo} is called. Calling {@link + * #getBinarySerializedSize} may add values to {@link MarshalerContext} that are later used in + * {@link #writeTo}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public interface StatelessMarshaler2 { + + /** Returns the number of bytes this Marshaler will write. */ + int getBinarySerializedSize(K key, V value, MarshalerContext context); + + /** Marshal given key and value using the provided {@link Serializer}. */ + void writeTo(Serializer output, K key, V value, MarshalerContext context) throws IOException; +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtil.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtil.java new file mode 100644 index 00000000000..793f90cc9ae --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtil.java @@ -0,0 +1,493 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.marshal; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Function; +import javax.annotation.Nullable; + +/** + * Marshaler utilities. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class StatelessMarshalerUtil { + private static final MarshalerContext.Key GROUPER_KEY = MarshalerContext.key(); + private static final MarshalerContext.Key ATTRIBUTES_SIZE_CALCULATOR_KEY = MarshalerContext.key(); + + /** Groups SDK items by resource and instrumentation scope. */ + public static Map>> groupByResourceAndScope( + Collection dataList, + Function getResource, + Function getInstrumentationScope, + MarshalerContext context) { + Map>> result = context.getIdentityMap(); + + Grouper grouper = context.getInstance(GROUPER_KEY, Grouper::new); + grouper.initialize(result, getResource, getInstrumentationScope, context); + dataList.forEach(grouper); + + return result; + } + + private static class Grouper implements Consumer { + @SuppressWarnings("NullAway") + private Map>> result; + + @SuppressWarnings("NullAway") + private Function getResource; + + @SuppressWarnings("NullAway") + private Function getInstrumentationScope; + + @SuppressWarnings("NullAway") + private MarshalerContext context; + + void initialize( + Map>> result, + Function getResource, + Function getInstrumentationScope, + MarshalerContext context) { + this.result = result; + this.getResource = getResource; + this.getInstrumentationScope = getInstrumentationScope; + this.context = context; + } + + @Override + public void accept(T data) { + Resource resource = getResource.apply(data); + Map> scopeInfoListMap = result.get(resource); + if (scopeInfoListMap == null) { + scopeInfoListMap = context.getIdentityMap(); + result.put(resource, scopeInfoListMap); + } + InstrumentationScopeInfo instrumentationScopeInfo = getInstrumentationScope.apply(data); + List elementList = scopeInfoListMap.get(instrumentationScopeInfo); + if (elementList == null) { + elementList = context.getList(); + scopeInfoListMap.put(instrumentationScopeInfo, elementList); + } + elementList.add(data); + } + } + + /** + * Returns the size of a string field. This method adds elements to context, use together with + * {@link Serializer#serializeStringWithContext(ProtoFieldInfo, String, MarshalerContext)}. + */ + public static int sizeStringWithContext( + ProtoFieldInfo field, @Nullable String value, MarshalerContext context) { + if (value == null || value.isEmpty()) { + return sizeBytes(field, 0); + } + if (context.marshalStringNoAllocation()) { + int utf8Size = getUtf8Size(value, context); + context.addSize(utf8Size); + return sizeBytes(field, utf8Size); + } else { + byte[] valueUtf8 = MarshalerUtil.toBytes(value); + context.addData(valueUtf8); + return sizeBytes(field, valueUtf8.length); + } + } + + /** Returns the size of a bytes field. */ + private static int sizeBytes(ProtoFieldInfo field, int length) { + if (length == 0) { + return 0; + } + return field.getTagSize() + CodedOutputStream.computeLengthDelimitedFieldSize(length); + } + + /** + * Returns the size of a repeated message field. This method adds elements to context, use + * together with {@link Serializer#serializeRepeatedMessageWithContext(ProtoFieldInfo, List, + * StatelessMarshaler, MarshalerContext)}. 
+ */ + public static int sizeRepeatedMessageWithContext( + ProtoFieldInfo field, + List messages, + StatelessMarshaler marshaler, + MarshalerContext context) { + if (messages.isEmpty()) { + return 0; + } + + int size = 0; + int fieldTagSize = field.getTagSize(); + for (int i = 0; i < messages.size(); i++) { + T message = messages.get(i); + int sizeIndex = context.addSize(); + int fieldSize = marshaler.getBinarySerializedSize(message, context); + context.setSize(sizeIndex, fieldSize); + size += fieldTagSize + CodedOutputStream.computeUInt32SizeNoTag(fieldSize) + fieldSize; + } + return size; + } + + /** + * Returns the size of a repeated message field. This method adds elements to context, use + * together with {@link Serializer#serializeRepeatedMessageWithContext(ProtoFieldInfo, Collection, + * StatelessMarshaler, MarshalerContext, MarshalerContext.Key)}. + */ + @SuppressWarnings("unchecked") + public static int sizeRepeatedMessageWithContext( + ProtoFieldInfo field, + Collection messages, + StatelessMarshaler marshaler, + MarshalerContext context, + MarshalerContext.Key key) { + if (messages instanceof List) { + return sizeRepeatedMessageWithContext(field, (List) messages, marshaler, context); + } + + if (messages.isEmpty()) { + return 0; + } + + RepeatedElementSizeCalculator sizeCalculator = + context.getInstance(key, RepeatedElementSizeCalculator::new); + sizeCalculator.initialize(field, marshaler, context); + messages.forEach(sizeCalculator); + + return sizeCalculator.size; + } + + /** + * Returns the size of a repeated message field. This method adds elements to context, use + * together with {@link Serializer#serializeRepeatedMessageWithContext(ProtoFieldInfo, Map, + * StatelessMarshaler2, MarshalerContext, MarshalerContext.Key)}. + */ + public static int sizeRepeatedMessageWithContext( + ProtoFieldInfo field, + Map messages, + StatelessMarshaler2 marshaler, + MarshalerContext context, + MarshalerContext.Key key) { + if (messages.isEmpty()) { + return 0; + } + + RepeatedElementPairSizeCalculator sizeCalculator = + context.getInstance(key, RepeatedElementPairSizeCalculator::new); + sizeCalculator.initialize(field, marshaler, context); + messages.forEach(sizeCalculator); + + return sizeCalculator.size; + } + + /** + * Returns the size of a repeated message field. This method adds elements to context, use + * together with {@link Serializer#serializeRepeatedMessageWithContext(ProtoFieldInfo, Attributes, + * StatelessMarshaler2, MarshalerContext)}. 
+ */ + public static int sizeRepeatedMessageWithContext( + ProtoFieldInfo field, + Attributes attributes, + StatelessMarshaler2, Object> marshaler, + MarshalerContext context) { + if (attributes.isEmpty()) { + return 0; + } + + RepeatedElementPairSizeCalculator, Object> sizeCalculator = + context.getInstance(ATTRIBUTES_SIZE_CALCULATOR_KEY, RepeatedElementPairSizeCalculator::new); + sizeCalculator.initialize(field, marshaler, context); + attributes.forEach(sizeCalculator); + + return sizeCalculator.size; + } + + private static class RepeatedElementSizeCalculator implements Consumer { + private int size; + private int fieldTagSize; + + @SuppressWarnings("NullAway") + private StatelessMarshaler marshaler; + + @SuppressWarnings("NullAway") + private MarshalerContext context; + + void initialize( + ProtoFieldInfo field, StatelessMarshaler marshaler, MarshalerContext context) { + this.size = 0; + this.fieldTagSize = field.getTagSize(); + this.marshaler = marshaler; + this.context = context; + } + + @Override + public void accept(T element) { + int sizeIndex = context.addSize(); + int fieldSize = marshaler.getBinarySerializedSize(element, context); + context.setSize(sizeIndex, fieldSize); + size += fieldTagSize + CodedOutputStream.computeUInt32SizeNoTag(fieldSize) + fieldSize; + } + } + + private static class RepeatedElementPairSizeCalculator implements BiConsumer { + private int size; + private int fieldTagSize; + + @SuppressWarnings("NullAway") + private StatelessMarshaler2 marshaler; + + @SuppressWarnings("NullAway") + private MarshalerContext context; + + void initialize( + ProtoFieldInfo field, StatelessMarshaler2 marshaler, MarshalerContext context) { + this.size = 0; + this.fieldTagSize = field.getTagSize(); + this.marshaler = marshaler; + this.context = context; + } + + @Override + public void accept(K key, V value) { + int sizeIndex = context.addSize(); + int fieldSize = marshaler.getBinarySerializedSize(key, value, context); + context.setSize(sizeIndex, fieldSize); + size += fieldTagSize + CodedOutputStream.computeUInt32SizeNoTag(fieldSize) + fieldSize; + } + } + + /** + * Returns the size of a message field. This method adds elements to context, use together with + * {@link Serializer#serializeMessageWithContext(ProtoFieldInfo, Object, StatelessMarshaler, + * MarshalerContext)}. + */ + public static int sizeMessageWithContext( + ProtoFieldInfo field, T element, StatelessMarshaler marshaler, MarshalerContext context) { + int sizeIndex = context.addSize(); + int fieldSize = marshaler.getBinarySerializedSize(element, context); + int size = field.getTagSize() + CodedOutputStream.computeUInt32SizeNoTag(fieldSize) + fieldSize; + context.setSize(sizeIndex, fieldSize); + return size; + } + + /** + * Returns the size of a message field. This method adds elements to context, use together with + * {@link Serializer#serializeMessageWithContext(ProtoFieldInfo, Object, Object, + * StatelessMarshaler2, MarshalerContext)}. + */ + public static int sizeMessageWithContext( + ProtoFieldInfo field, + K key, + V value, + StatelessMarshaler2 marshaler, + MarshalerContext context) { + int sizeIndex = context.addSize(); + int fieldSize = marshaler.getBinarySerializedSize(key, value, context); + int size = field.getTagSize() + CodedOutputStream.computeUInt32SizeNoTag(fieldSize) + fieldSize; + context.setSize(sizeIndex, fieldSize); + return size; + } + + /** Returns the size of utf8 encoded string in bytes. 
*/ + private static int getUtf8Size(String string, MarshalerContext context) { + return getUtf8Size(string, context.marshalStringUnsafe()); + } + + // Visible for testing + static int getUtf8Size(String string, boolean useUnsafe) { + if (useUnsafe && UnsafeString.isAvailable() && UnsafeString.isLatin1(string)) { + byte[] bytes = UnsafeString.getBytes(string); + // latin1 bytes with negative value (most significant bit set) are encoded as 2 bytes in utf8 + return string.length() + countNegative(bytes); + } + + return encodedUtf8Length(string); + } + + // Inner loop can process at most 8 * 255 bytes without overflowing counter. To process more bytes + // inner loop has to be run multiple times. + private static final int MAX_INNER_LOOP_SIZE = 8 * 255; + // mask that selects only the most significant bit in every byte of the long + private static final long MOST_SIGNIFICANT_BIT_MASK = 0x8080808080808080L; + + /** Returns the count of bytes with negative value. */ + private static int countNegative(byte[] bytes) { + int count = 0; + int offset = 0; + // We are processing one long (8 bytes) at a time. In the inner loop we are keeping counts in a + // long where each byte in the long is a separate counter. Due to this the inner loop can + // process a maximum of 8*255 bytes at a time without overflow. + for (int i = 1; i <= bytes.length / MAX_INNER_LOOP_SIZE + 1; i++) { + long tmp = 0; // each byte in this long is a separate counter + int limit = Math.min(i * MAX_INNER_LOOP_SIZE, bytes.length & ~7); + for (; offset < limit; offset += 8) { + long value = UnsafeString.getLong(bytes, offset); + // Mask the value keeping only the most significant bit in each byte and then shift this bit + // to the position of the least significant bit in each byte. If the input byte was not + // negative then after this transformation it will be zero, if it was negative then it will + // be one. + tmp += (value & MOST_SIGNIFICANT_BIT_MASK) >>> 7; + } + // sum up counts + if (tmp != 0) { + for (int j = 0; j < 8; j++) { + count += (int) (tmp & 0xff); + tmp = tmp >>> 8; + } + } + } + + // Handle remaining bytes. Previous loop processes 8 bytes a time, if the input size is not + // divisible with 8 the remaining bytes are handled here. + for (int i = offset; i < bytes.length; i++) { + // same as if (bytes[i] < 0) count++; + count += bytes[i] >>> 31; + } + return count; + } + + // adapted from + // https://github.com/protocolbuffers/protobuf/blob/b618f6750aed641a23d5f26fbbaf654668846d24/java/core/src/main/java/com/google/protobuf/Utf8.java#L217 + private static int encodedUtf8Length(String string) { + // Warning to maintainers: this implementation is highly optimized. + int utf16Length = string.length(); + int utf8Length = utf16Length; + int i = 0; + + // This loop optimizes for pure ASCII. + while (i < utf16Length && string.charAt(i) < 0x80) { + i++; + } + + // This loop optimizes for chars less than 0x800. + for (; i < utf16Length; i++) { + char c = string.charAt(i); + if (c < 0x800) { + utf8Length += ((0x7f - c) >>> 31); // branch free! 
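+        // ("branch free" above: (0x7f - c) >>> 31 is 1 when c >= 0x80, i.e. the char needs a second UTF-8 byte, and 0 otherwise)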
+ } else { + utf8Length += encodedUtf8LengthGeneral(string, i); + break; + } + } + + if (utf8Length < utf16Length) { + // Necessary and sufficient condition for overflow because of maximum 3x expansion + throw new IllegalArgumentException( + "UTF-8 length does not fit in int: " + (utf8Length + (1L << 32))); + } + + return utf8Length; + } + + // adapted from + // https://github.com/protocolbuffers/protobuf/blob/b618f6750aed641a23d5f26fbbaf654668846d24/java/core/src/main/java/com/google/protobuf/Utf8.java#L247 + private static int encodedUtf8LengthGeneral(String string, int start) { + int utf16Length = string.length(); + int utf8Length = 0; + for (int i = start; i < utf16Length; i++) { + char c = string.charAt(i); + if (c < 0x800) { + utf8Length += (0x7f - c) >>> 31; // branch free! + } else { + utf8Length += 2; + if (Character.isSurrogate(c)) { + // Check that we have a well-formed surrogate pair. + if (Character.codePointAt(string, i) != c) { + i++; + } else { + // invalid sequence + // At this point we have accumulated 3 byes of length (2 in this method and 1 in caller) + // for current character, reduce the length to 1 bytes as we are going to encode the + // invalid character as ? + utf8Length -= 2; + } + } + } + } + + return utf8Length; + } + + /** Write utf8 encoded string to output stream. */ + @SuppressWarnings("UnusedVariable") // context argument is added for future use + static void writeUtf8( + CodedOutputStream output, String string, int utf8Length, MarshalerContext context) + throws IOException { + writeUtf8(output, string, utf8Length, context.marshalStringUnsafe()); + } + + // Visible for testing + @SuppressWarnings("UnusedVariable") // utf8Length argument is added for future use + static void writeUtf8(CodedOutputStream output, String string, int utf8Length, boolean useUnsafe) + throws IOException { + // if the length of the latin1 string and the utf8 output are the same then the string must be + // composed of only 7bit characters and can be directly copied to the output + if (useUnsafe + && UnsafeString.isAvailable() + && string.length() == utf8Length + && UnsafeString.isLatin1(string)) { + byte[] bytes = UnsafeString.getBytes(string); + output.write(bytes, 0, bytes.length); + } else { + encodeUtf8(output, string); + } + } + + // encode utf8 the same way as length is computed in encodedUtf8Length + // adapted from + // https://github.com/protocolbuffers/protobuf/blob/b618f6750aed641a23d5f26fbbaf654668846d24/java/core/src/main/java/com/google/protobuf/Utf8.java#L1016 + private static void encodeUtf8(CodedOutputStream output, String in) throws IOException { + int utf16Length = in.length(); + int i = 0; + // Designed to take advantage of + // https://wiki.openjdk.java.net/display/HotSpotInternals/RangeCheckElimination + for (char c; i < utf16Length && (c = in.charAt(i)) < 0x80; i++) { + output.write((byte) c); + } + if (i == utf16Length) { + return; + } + + for (char c; i < utf16Length; i++) { + c = in.charAt(i); + if (c < 0x80) { + // 1 byte, 7 bits + output.write((byte) c); + } else if (c < 0x800) { // 11 bits, two UTF-8 bytes + output.write((byte) ((0xF << 6) | (c >>> 6))); + output.write((byte) (0x80 | (0x3F & c))); + } else if (!Character.isSurrogate(c)) { + // Maximum single-char code point is 0xFFFF, 16 bits, three UTF-8 bytes + output.write((byte) ((0xF << 5) | (c >>> 12))); + output.write((byte) (0x80 | (0x3F & (c >>> 6)))); + output.write((byte) (0x80 | (0x3F & c))); + } else { + // Minimum code point represented by a surrogate pair is 0x10000, 17 bits, + // four 
UTF-8 bytes + int codePoint = Character.codePointAt(in, i); + if (codePoint != c) { + output.write((byte) ((0xF << 4) | (codePoint >>> 18))); + output.write((byte) (0x80 | (0x3F & (codePoint >>> 12)))); + output.write((byte) (0x80 | (0x3F & (codePoint >>> 6)))); + output.write((byte) (0x80 | (0x3F & codePoint))); + i++; + } else { + // invalid sequence + output.write((byte) '?'); + } + } + } + } + + private StatelessMarshalerUtil() {} +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/UnsafeAccess.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/UnsafeAccess.java new file mode 100644 index 00000000000..a57e941d6dd --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/UnsafeAccess.java @@ -0,0 +1,93 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.marshal; + +import io.opentelemetry.api.internal.ConfigUtil; +import java.lang.reflect.Field; +import sun.misc.Unsafe; + +class UnsafeAccess { + private static final int MAX_ENABLED_JAVA_VERSION = 22; + private static final boolean available = checkUnsafe(); + + static boolean isAvailable() { + return available; + } + + private static boolean checkUnsafe() { + double javaVersion = getJavaVersion(); + boolean unsafeEnabled = + Boolean.parseBoolean( + ConfigUtil.getString( + "otel.java.experimental.exporter.unsafe.enabled", + javaVersion != -1 && javaVersion <= MAX_ENABLED_JAVA_VERSION ? "true" : "false")); + if (!unsafeEnabled) { + return false; + } + + try { + Class.forName("sun.misc.Unsafe", false, UnsafeAccess.class.getClassLoader()); + return UnsafeHolder.UNSAFE != null; + } catch (ClassNotFoundException e) { + return false; + } + } + + private static double getJavaVersion() { + String specVersion = System.getProperty("java.specification.version"); + if (specVersion != null) { + try { + return Double.parseDouble(specVersion); + } catch (NumberFormatException exception) { + // ignore + } + } + return -1; + } + + static long objectFieldOffset(Field field) { + return UnsafeHolder.UNSAFE.objectFieldOffset(field); + } + + static Object getObject(Object object, long offset) { + return UnsafeHolder.UNSAFE.getObject(object, offset); + } + + static byte getByte(Object object, long offset) { + return UnsafeHolder.UNSAFE.getByte(object, offset); + } + + static int arrayBaseOffset(Class arrayClass) { + return UnsafeHolder.UNSAFE.arrayBaseOffset(arrayClass); + } + + static long getLong(Object o, long offset) { + return UnsafeHolder.UNSAFE.getLong(o, offset); + } + + private UnsafeAccess() {} + + private static class UnsafeHolder { + public static final Unsafe UNSAFE; + + static { + UNSAFE = getUnsafe(); + } + + private UnsafeHolder() {} + + @SuppressWarnings("NullAway") + private static Unsafe getUnsafe() { + try { + Field field = Unsafe.class.getDeclaredField("theUnsafe"); + field.setAccessible(true); + return (Unsafe) field.get(null); + } catch (Exception ignored) { + return null; + } + } + } +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/UnsafeString.java b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/UnsafeString.java new file mode 100644 index 00000000000..309b005fd49 --- /dev/null +++ b/exporters/common/src/main/java/io/opentelemetry/exporter/internal/marshal/UnsafeString.java @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + 
+package io.opentelemetry.exporter.internal.marshal; + +import java.lang.reflect.Field; + +class UnsafeString { + private static final long valueOffset = getStringFieldOffset("value", byte[].class); + private static final long coderOffset = getStringFieldOffset("coder", byte.class); + private static final int byteArrayBaseOffset = + UnsafeAccess.isAvailable() ? UnsafeAccess.arrayBaseOffset(byte[].class) : -1; + private static final boolean available = valueOffset != -1 && coderOffset != -1; + + static boolean isAvailable() { + return available; + } + + static boolean isLatin1(String string) { + // 0 represents latin1, 1 utf16 + return UnsafeAccess.getByte(string, coderOffset) == 0; + } + + static byte[] getBytes(String string) { + return (byte[]) UnsafeAccess.getObject(string, valueOffset); + } + + static long getLong(byte[] bytes, int index) { + return UnsafeAccess.getLong(bytes, byteArrayBaseOffset + index); + } + + private static long getStringFieldOffset(String fieldName, Class expectedType) { + if (!UnsafeAccess.isAvailable()) { + return -1; + } + + try { + Field field = String.class.getDeclaredField(fieldName); + if (field.getType() != expectedType) { + return -1; + } + return UnsafeAccess.objectFieldOffset(field); + } catch (Exception exception) { + return -1; + } + } + + private UnsafeString() {} +} diff --git a/exporters/common/src/main/resources/META-INF/native-image/io.opentelemetry.opentelemetry-exporter-common/reflect-config.json b/exporters/common/src/main/resources/META-INF/native-image/io.opentelemetry.opentelemetry-exporter-common/reflect-config.json new file mode 100644 index 00000000000..1d93899460b --- /dev/null +++ b/exporters/common/src/main/resources/META-INF/native-image/io.opentelemetry.opentelemetry-exporter-common/reflect-config.json @@ -0,0 +1,14 @@ +[ + { + "name":"io.opentelemetry.sdk.common.export.AutoValue_RetryPolicy", + "queryAllDeclaredMethods":true + }, + { + "name":"io.opentelemetry.sdk.common.export.RetryPolicy", + "queryAllDeclaredMethods":true + }, + { + "name":"io.opentelemetry.exporter.internal.compression.Compressor", + "queryAllDeclaredMethods":true + } +] diff --git a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/InstrumentationUtilTest.java b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/InstrumentationUtilTest.java new file mode 100644 index 00000000000..5a6a2cfd5a2 --- /dev/null +++ b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/InstrumentationUtilTest.java @@ -0,0 +1,30 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.opentelemetry.context.Context; +import org.junit.jupiter.api.Test; + +class InstrumentationUtilTest { + + // testing deprecated implementation until it's removed + @Test + @SuppressWarnings("deprecation") + void verifySuppressInstrumentation() { + // Should be false by default. + assertFalse(InstrumentationUtil.shouldSuppressInstrumentation(Context.current())); + + // Should be true inside the Runnable passed to InstrumentationUtil.suppressInstrumentation. + InstrumentationUtil.suppressInstrumentation( + () -> assertTrue(InstrumentationUtil.shouldSuppressInstrumentation(Context.current()))); + + // Should be false after the runnable finishes. 
+ assertFalse(InstrumentationUtil.shouldSuppressInstrumentation(Context.current())); + } +} diff --git a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/TlsUtilTest.java b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/TlsUtilTest.java index caca1dee1ab..0134fd46f2d 100644 --- a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/TlsUtilTest.java +++ b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/TlsUtilTest.java @@ -8,18 +8,36 @@ import static org.assertj.core.api.Assertions.assertThatCode; import com.linecorp.armeria.internal.common.util.SelfSignedCertificate; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; import java.security.KeyFactory; import java.security.cert.CertificateException; import java.security.spec.PKCS8EncodedKeySpec; import java.time.Instant; import java.util.Collections; import java.util.Date; +import java.util.stream.Stream; import javax.net.ssl.SSLException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; class TlsUtilTest { + @TempDir private Path tempDir; + + private static final String EXPLANATORY_TEXT = + "Subject: CN=Foo\n" + + "Issuer: CN=Foo\n" + + "Validity: from 7/9/2012 3:10:38 AM UTC to 7/9/2013 3:10:37 AM UTC\n"; + private SelfSignedCertificate rsaCertificate; private SelfSignedCertificate ecCertificate; @@ -60,4 +78,36 @@ void generatePrivateKey_Invalid() { .isInstanceOf(SSLException.class) .hasMessage("Unable to generate key from supported algorithms: [EC]"); } + + /** + * Append explanatory text + * prefix and verify {@link TlsUtil#keyManager(byte[], byte[])} succeeds. 
+ */ + @ParameterizedTest + @MethodSource("keyManagerArgs") + void keyManager_CertWithExplanatoryText(SelfSignedCertificate selfSignedCertificate) + throws IOException { + Path certificate = tempDir.resolve("certificate"); + Files.write(certificate, EXPLANATORY_TEXT.getBytes(StandardCharsets.UTF_8)); + Files.write( + certificate, + com.google.common.io.Files.toByteArray(selfSignedCertificate.certificate()), + StandardOpenOption.APPEND); + Files.write(certificate, "\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND); + + assertThatCode( + () -> + TlsUtil.keyManager( + com.google.common.io.Files.toByteArray(selfSignedCertificate.privateKey()), + com.google.common.io.Files.toByteArray(new File(certificate.toString())))) + .doesNotThrowAnyException(); + } + + private static Stream keyManagerArgs() throws CertificateException { + Instant now = Instant.now(); + return Stream.of( + Arguments.of( + new SelfSignedCertificate(Date.from(now), Date.from(now), "RSA", 2048), + new SelfSignedCertificate(Date.from(now), Date.from(now), "EC", 256))); + } } diff --git a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/auth/AuthenticatorTest.java b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/auth/AuthenticatorTest.java deleted file mode 100644 index 93e07e12d41..00000000000 --- a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/auth/AuthenticatorTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.internal.auth; - -import static org.assertj.core.api.Assertions.as; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; - -import io.opentelemetry.exporter.internal.grpc.GrpcExporter; -import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import org.assertj.core.api.InstanceOfAssertFactories; -import org.junit.jupiter.api.Test; - -class AuthenticatorTest { - - @Test - void getHeaders() { - Map input = new HashMap<>(); - input.put("key1", "value1"); - input.put("key2", "value2"); - - Authenticator authenticator = () -> new HashMap<>(input); - assertThat(authenticator.getHeaders()).isEqualTo(input); - } - - @Test - void setAuthenticatorOnDelegate_Success() { - HttpExporterBuilder builder = - new HttpExporterBuilder<>("otlp", "test", "http://localhost:4318/test"); - - assertThat(builder).extracting("authenticator").isNull(); - - Authenticator authenticator = Collections::emptyMap; - - Authenticator.setAuthenticatorOnDelegate(new WithDelegate(builder), authenticator); - - assertThat(builder) - .extracting("authenticator", as(InstanceOfAssertFactories.type(Authenticator.class))) - .isSameAs(authenticator); - } - - @Test - void setAuthenticatorOnDelegate_Fail() { - Authenticator authenticator = Collections::emptyMap; - - assertThatThrownBy(() -> Authenticator.setAuthenticatorOnDelegate(new Object(), authenticator)) - .isInstanceOf(IllegalArgumentException.class); - assertThatThrownBy( - () -> - Authenticator.setAuthenticatorOnDelegate( - new WithDelegate(new Object()), authenticator)) - .isInstanceOf(IllegalArgumentException.class); - assertThatThrownBy( - () -> - Authenticator.setAuthenticatorOnDelegate( - new WithDelegate(new GrpcExporter<>(null, null, null, null)), authenticator)) - .isInstanceOf(IllegalArgumentException.class); - } - - 
@SuppressWarnings({"UnusedVariable", "FieldCanBeLocal"}) - private static class WithDelegate { - - private final Object delegate; - - private WithDelegate(Object delegate) { - this.delegate = delegate; - } - } -} diff --git a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/grpc/GrpcExporterBuilderTest.java b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/grpc/GrpcExporterBuilderTest.java index dcf1ea64387..e562729ae1d 100644 --- a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/grpc/GrpcExporterBuilderTest.java +++ b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/grpc/GrpcExporterBuilderTest.java @@ -7,6 +7,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import io.opentelemetry.exporter.internal.compression.GzipCompressor; import io.opentelemetry.exporter.internal.marshal.Marshaler; import java.net.URI; import org.junit.jupiter.api.BeforeEach; @@ -25,27 +26,27 @@ void setUp() { @Test void compressionDefault() { - assertThat(builder).extracting("compressionEnabled").isEqualTo(false); + assertThat(builder).extracting("compressor").isNull(); } @Test void compressionNone() { - builder.setCompression("none"); + builder.setCompression(null); - assertThat(builder).extracting("compressionEnabled").isEqualTo(false); + assertThat(builder).extracting("compressor").isNull(); } @Test void compressionGzip() { - builder.setCompression("gzip"); + builder.setCompression(GzipCompressor.getInstance()); - assertThat(builder).extracting("compressionEnabled").isEqualTo(true); + assertThat(builder).extracting("compressor").isEqualTo(GzipCompressor.getInstance()); } @Test void compressionEnabledAndDisabled() { - builder.setCompression("gzip").setCompression("none"); + builder.setCompression(GzipCompressor.getInstance()).setCompression(null); - assertThat(builder).extracting("compressionEnabled").isEqualTo(false); + assertThat(builder).extracting("compressor").isNull(); } } diff --git a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/MarshalerTest.java b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/MarshalerTest.java index eff401a2800..a89ea217cc1 100644 --- a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/MarshalerTest.java +++ b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/MarshalerTest.java @@ -11,9 +11,17 @@ import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.util.Collections; +import java.util.stream.Stream; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; class MarshalerTest { @@ -47,4 +55,123 @@ protected void writeTo(Serializer output) throws IOException { assertThatThrownBy(() -> marshaler.writeBinaryTo(os)).isInstanceOf(IOException.class); assertThatThrownBy(() -> marshaler.writeJsonTo(os)).isInstanceOf(IOException.class); } + + /** + * This test ensures that instances where serializer produces runtime exceptions are properly + * converted back to checked {@link IOException}. + * + *

At various points in {@link Serializer}, we use {@code .forEach}-style methods which accept + * {@link java.util.function.Consumer} and {@link java.util.function.BiConsumer}. These consumers + * that any checked exceptions are caught and rethrown as runtime exceptions. Its essential that + * these runtime exceptions are re-converted back to checked {@link IOException} because calling + * code's error handling is built on top of {@link IOException}. Failure to convert to {@link + * IOException} leads to issues like this: #6946. + */ + @ParameterizedTest + @MethodSource("writeToArgs") + void writeTo_NoRuntimeExceptions(Writer writer) throws IOException { + Marshaler marshaler = + new Marshaler() { + @Override + public int getBinarySerializedSize() { + return 0; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + writer.writeTo(output); + } + }; + + try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + assertThatThrownBy(() -> marshaler.writeBinaryTo(baos)).isInstanceOf(IOException.class); + } + } + + private static Stream writeToArgs() { + ProtoFieldInfo fieldInfo = ProtoFieldInfo.create(0, 0, "name"); + MarshalerContext context = new MarshalerContext(); + ThrowingMarshaler throwingMarshaler = new ThrowingMarshaler<>(new IOException("error")); + ThrowingMarshaler2 throwingMarshaler2 = + new ThrowingMarshaler2<>(new IOException("error")); + ThrowingMarshaler2, Object> throwingMarshaler2Attributes = + new ThrowingMarshaler2<>(new IOException("error")); + + return Stream.of( + Arguments.of( + asWriter( + output -> + output.serializeRepeatedMessageWithContext( + fieldInfo, + Collections.singleton("value"), + throwingMarshaler, + context, + MarshalerContext.key()))), + Arguments.of( + asWriter( + output -> + output.serializeRepeatedMessageWithContext( + fieldInfo, + Collections.singletonMap("key", "value"), + throwingMarshaler2, + context, + MarshalerContext.key()))), + Arguments.of( + asWriter( + output -> + output.serializeRepeatedMessageWithContext( + fieldInfo, + Attributes.builder().put("key", "value").build(), + throwingMarshaler2Attributes, + context)))); + } + + private static Writer asWriter(Writer writer) { + return writer; + } + + @FunctionalInterface + private interface Writer { + void writeTo(Serializer output) throws IOException; + } + + private static class ThrowingMarshaler implements StatelessMarshaler { + + private final IOException exception; + + private ThrowingMarshaler(IOException exception) { + this.exception = exception; + } + + @Override + public int getBinarySerializedSize(T value, MarshalerContext context) { + return 0; + } + + @Override + public void writeTo(Serializer output, T value, MarshalerContext context) throws IOException { + throw exception; + } + } + + private static class ThrowingMarshaler2 implements StatelessMarshaler2 { + + private final IOException exception; + + private ThrowingMarshaler2(IOException exception) { + this.exception = exception; + } + + @Override + public int getBinarySerializedSize(K key, V value, MarshalerContext context) { + return 0; + } + + @Override + public void writeTo(Serializer output, K key, V value, MarshalerContext context) + throws IOException { + throw exception; + } + } } diff --git a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtilFuzzTest.java b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtilFuzzTest.java new file mode 100644 index 00000000000..d8db6d2ed6c --- /dev/null +++ 
b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtilFuzzTest.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.marshal; + +import static io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil.getUtf8Size; +import static io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtilTest.testUtf8; +import static org.assertj.core.api.Assertions.assertThat; + +import edu.berkeley.cs.jqf.fuzz.Fuzz; +import edu.berkeley.cs.jqf.fuzz.JQF; +import edu.berkeley.cs.jqf.fuzz.junit.GuidedFuzzing; +import edu.berkeley.cs.jqf.fuzz.random.NoGuidance; +import java.nio.charset.StandardCharsets; +import org.junit.jupiter.api.Test; +import org.junit.runner.Result; +import org.junit.runner.RunWith; + +@SuppressWarnings("SystemOut") +class StatelessMarshalerUtilFuzzTest { + + @RunWith(JQF.class) + public static class EncodeUf8 { + + @Fuzz + public void encodeRandomString(String value) { + int utf8Size = value.getBytes(StandardCharsets.UTF_8).length; + assertThat(getUtf8Size(value, false)).isEqualTo(utf8Size); + assertThat(getUtf8Size(value, true)).isEqualTo(utf8Size); + assertThat(testUtf8(value, utf8Size, /* useUnsafe= */ false)).isEqualTo(value); + assertThat(testUtf8(value, utf8Size, /* useUnsafe= */ true)).isEqualTo(value); + } + } + + // driver methods to avoid having to use the vintage junit engine, and to enable increasing the + // number of iterations: + + @Test + void encodeUf8WithFuzzing() { + Result result = + GuidedFuzzing.run( + EncodeUf8.class, "encodeRandomString", new NoGuidance(10000, System.out), System.out); + assertThat(result.wasSuccessful()).isTrue(); + } +} diff --git a/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtilTest.java b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtilTest.java new file mode 100644 index 00000000000..c4eec43572d --- /dev/null +++ b/exporters/common/src/test/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtilTest.java @@ -0,0 +1,70 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.marshal; + +import static io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil.getUtf8Size; +import static io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil.writeUtf8; +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; +import java.util.Random; +import org.junit.jupiter.api.RepeatedTest; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +class StatelessMarshalerUtilTest { + + @ParameterizedTest + @ValueSource(strings = {"true", "false"}) + @SuppressWarnings("AvoidEscapedUnicodeCharacters") + void encodeUtf8(boolean useUnsafe) { + assertThat(getUtf8Size("", useUnsafe)).isEqualTo(0); + assertThat(testUtf8("", 0, useUnsafe)).isEqualTo(""); + + assertThat(getUtf8Size("a", useUnsafe)).isEqualTo(1); + assertThat(testUtf8("a", 1, useUnsafe)).isEqualTo("a"); + + assertThat(getUtf8Size("©", useUnsafe)).isEqualTo(2); + assertThat(testUtf8("©", 2, useUnsafe)).isEqualTo("©"); + + assertThat(getUtf8Size("∆", useUnsafe)).isEqualTo(3); + assertThat(testUtf8("∆", 3, useUnsafe)).isEqualTo("∆"); + + assertThat(getUtf8Size("😀", useUnsafe)).isEqualTo(4); + 
assertThat(testUtf8("😀", 4, useUnsafe)).isEqualTo("😀"); + + // test that invalid characters are replaced with ? + assertThat(getUtf8Size("\uD83D😀\uDE00", useUnsafe)).isEqualTo(6); + assertThat(testUtf8("\uD83D😀\uDE00", 6, useUnsafe)).isEqualTo("?😀?"); + + // the same invalid sequence as encoded by the jdk + byte[] bytes = "\uD83D😀\uDE00".getBytes(StandardCharsets.UTF_8); + assertThat(bytes.length).isEqualTo(6); + assertThat(new String(bytes, StandardCharsets.UTF_8)).isEqualTo("?😀?"); + } + + @RepeatedTest(1000) + void testUtf8SizeLatin1() { + Random random = new Random(); + byte[] bytes = new byte[15001]; + random.nextBytes(bytes); + String string = new String(bytes, StandardCharsets.ISO_8859_1); + int utf8Size = string.getBytes(StandardCharsets.UTF_8).length; + assertThat(getUtf8Size(string, true)).isEqualTo(utf8Size); + } + + static String testUtf8(String string, int utf8Length, boolean useUnsafe) { + try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { + CodedOutputStream codedOutputStream = CodedOutputStream.newInstance(outputStream); + writeUtf8(codedOutputStream, string, utf8Length, useUnsafe); + codedOutputStream.flush(); + return new String(outputStream.toByteArray(), StandardCharsets.UTF_8); + } catch (Exception exception) { + throw new IllegalArgumentException(exception); + } + } +} diff --git a/exporters/common/src/testWithoutUnsafe/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtilTest.java b/exporters/common/src/testWithoutUnsafe/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtilTest.java new file mode 100644 index 00000000000..8ff3ec2e04d --- /dev/null +++ b/exporters/common/src/testWithoutUnsafe/java/io/opentelemetry/exporter/internal/marshal/StatelessMarshalerUtilTest.java @@ -0,0 +1,101 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.marshal; + +import static io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil.getUtf8Size; +import static io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil.writeUtf8; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import org.junit.jupiter.api.Test; + +class StatelessMarshalerUtilTest { + + // Simulate running in an environment without sun.misc.Unsafe e.g. when running a modular + // application. To use sun.misc.Unsafe in modular application user would need to add dependency to + // jdk.unsupported module or use --add-modules jdk.unsupported. Here we use a custom child first + // class loader that does not delegate loading sun.misc classes to make sun.misc.Unsafe + // unavailable. 
+ @Test + void encodeUtf8WithoutUnsafe() throws Exception { + ClassLoader testClassLoader = + new ClassLoader(this.getClass().getClassLoader()) { + @Override + protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + // don't allow loading sun.misc classes + if (name.startsWith("sun.misc")) { + throw new ClassNotFoundException(name); + } + // load io.opentelemetry in the custom loader + if (name.startsWith("io.opentelemetry")) { + synchronized (this) { + Class clazz = findLoadedClass(name); + if (clazz != null) { + return clazz; + } + try (InputStream inputStream = + getParent().getResourceAsStream(name.replace(".", "/") + ".class")) { + if (inputStream != null) { + byte[] bytes = readBytes(inputStream); + // we don't bother to define packages or provide protection domain + return defineClass(name, bytes, 0, bytes.length); + } + } catch (IOException exception) { + throw new ClassNotFoundException(name, exception); + } + } + } + return super.loadClass(name, resolve); + } + }; + + // load test class in the custom loader and run the test + Class testClass = testClassLoader.loadClass(this.getClass().getName() + "$TestClass"); + assertThat(testClass.getClassLoader()).isEqualTo(testClassLoader); + Runnable test = (Runnable) testClass.getConstructor().newInstance(); + test.run(); + } + + private static byte[] readBytes(InputStream inputStream) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + byte[] buffer = new byte[1024]; + + int readCount; + while ((readCount = inputStream.read(buffer, 0, buffer.length)) != -1) { + out.write(buffer, 0, readCount); + } + return out.toByteArray(); + } + + @SuppressWarnings("unused") + public static class TestClass implements Runnable { + + @Override + public void run() { + // verify that unsafe can't be found + assertThatThrownBy(() -> Class.forName("sun.misc.Unsafe")) + .isInstanceOf(ClassNotFoundException.class); + // test the methods that use unsafe + assertThat(getUtf8Size("a", true)).isEqualTo(1); + assertThat(testUtf8("a", 0)).isEqualTo("a"); + } + + static String testUtf8(String string, int utf8Length) { + try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { + CodedOutputStream codedOutputStream = CodedOutputStream.newInstance(outputStream); + writeUtf8(codedOutputStream, string, utf8Length, true); + codedOutputStream.flush(); + return new String(outputStream.toByteArray(), StandardCharsets.UTF_8); + } catch (Exception exception) { + throw new IllegalArgumentException(exception); + } + } + } +} diff --git a/exporters/jaeger-proto/README.md b/exporters/jaeger-proto/README.md deleted file mode 100644 index f7e8a0497d8..00000000000 --- a/exporters/jaeger-proto/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# OpenTelemetry - Jaeger Proto (DEPRECATED) - -> **NOTICE**: External use of this artifact is deprecated. 
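For reviewers skimming the stateless marshaling API introduced earlier in this diff, the intended flow is two passes over a shared MarshalerContext: getBinarySerializedSize records sizes (and, when string pre-encoding is enabled, UTF-8 data) into the context, and writeTo then replays them in the same order. The sketch below is illustrative only and is not code from this change: the ProtoFieldInfo constant is a made-up placeholder, and Serializer.serializeStringWithContext is assumed to be the write-side counterpart referenced in the sizeStringWithContext Javadoc.

```java
package io.opentelemetry.exporter.internal.marshal;

import java.io.IOException;

/** Minimal sketch of a stateless marshaler for a message with a single string field. */
final class ExampleNameMarshaler implements StatelessMarshaler<String> {
  static final ExampleNameMarshaler INSTANCE = new ExampleNameMarshaler();

  // Placeholder field definition for illustration (field number 1, length-delimited wire type);
  // real marshalers use the generated proto field constants.
  private static final ProtoFieldInfo NAME = ProtoFieldInfo.create(1, (1 << 3) | 2, "name");

  private ExampleNameMarshaler() {}

  @Override
  public int getBinarySerializedSize(String value, MarshalerContext context) {
    // Sizing pass: computes the field size and stores it (plus, possibly, the encoded
    // UTF-8 bytes) in the context for the write pass to reuse.
    return StatelessMarshalerUtil.sizeStringWithContext(NAME, value, context);
  }

  @Override
  public void writeTo(Serializer output, String value, MarshalerContext context)
      throws IOException {
    // Write pass: consumes the sizes/data recorded above, in the same order they were added.
    output.serializeStringWithContext(NAME, value, context);
  }
}
```

Because the marshaler keeps no per-call state, the single INSTANCE can be shared across threads; everything mutable lives in the MarshalerContext that the caller threads through both passes.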
diff --git a/exporters/jaeger-proto/build.gradle.kts b/exporters/jaeger-proto/build.gradle.kts deleted file mode 100644 index 165686f909d..00000000000 --- a/exporters/jaeger-proto/build.gradle.kts +++ /dev/null @@ -1,25 +0,0 @@ -plugins { - id("otel.protobuf-conventions") - - id("otel.animalsniffer-conventions") -} - -description = "OpenTelemetry - Jaeger Exporter Proto (Internal Use Only)" -otelJava.moduleName.set("io.opentelemetry.exporter.jaeger.proto") - -dependencies { - api("com.google.protobuf:protobuf-java") - - compileOnly("io.grpc:grpc-api") - compileOnly("io.grpc:grpc-protobuf") - compileOnly("io.grpc:grpc-stub") -} - -tasks { - compileJava { - with(options) { - // Generated code so can't control serialization. - compilerArgs.add("-Xlint:-serial") - } - } -} diff --git a/exporters/jaeger-proto/src/main/proto/jaeger/api_v2/collector.proto b/exporters/jaeger-proto/src/main/proto/jaeger/api_v2/collector.proto deleted file mode 100644 index 03d6b6be8ca..00000000000 --- a/exporters/jaeger-proto/src/main/proto/jaeger/api_v2/collector.proto +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -syntax="proto3"; - -package jaeger.api_v2; - -import "jaeger/api_v2/model.proto"; - -option java_package = "io.opentelemetry.exporter.jaeger.proto.api_v2"; - -message PostSpansRequest { - Batch batch = 1; -} - -message PostSpansResponse { -} - -service CollectorService { - rpc PostSpans(PostSpansRequest) returns (PostSpansResponse) {} -} diff --git a/exporters/jaeger-proto/src/main/proto/jaeger/api_v2/model.proto b/exporters/jaeger-proto/src/main/proto/jaeger/api_v2/model.proto deleted file mode 100644 index 0d2bd212787..00000000000 --- a/exporters/jaeger-proto/src/main/proto/jaeger/api_v2/model.proto +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -syntax="proto3"; - -package jaeger.api_v2; - -import "google/protobuf/timestamp.proto"; -import "google/protobuf/duration.proto"; - -option java_package = "io.opentelemetry.exporter.jaeger.proto.api_v2"; - -enum ValueType { - STRING = 0; - BOOL = 1; - INT64 = 2; - FLOAT64 = 3; - BINARY = 4; -}; - -message Log { - google.protobuf.Timestamp timestamp = 1; - repeated KeyValue fields = 2; -} - -message KeyValue { - string key = 1; - ValueType v_type = 2; - string v_str = 3; - bool v_bool = 4; - int64 v_int64 = 5; - double v_float64 = 6; - bytes v_binary = 7; -} - -enum SpanRefType { - CHILD_OF = 0; - FOLLOWS_FROM = 1; -}; - -message SpanRef { - bytes trace_id = 1; - bytes span_id = 2; - SpanRefType ref_type = 3; -} - -message Process { - string service_name = 1; - repeated KeyValue tags = 2; -} - -message Span { - bytes trace_id = 1; - bytes span_id = 2; - string operation_name = 3; - repeated SpanRef references = 4; - uint32 flags = 5; - google.protobuf.Timestamp start_time = 6; - google.protobuf.Duration duration = 7; - repeated KeyValue tags = 8; - repeated Log logs = 9; - Process process = 10; - string process_id = 11; - repeated string warnings = 12; -} - -message Trace { - message ProcessMapping { - string process_id = 1; - Process process = 2; - } - repeated Span spans = 1; - repeated ProcessMapping process_map = 2; - repeated string warnings = 3; -} - -message Batch { - repeated Span spans = 1; - Process process = 2; -} - -message DependencyLink { - string parent = 1; - string child = 2; - uint64 call_count = 3; - string source = 4; -} diff --git 
a/exporters/jaeger-proto/src/main/proto/jaeger/api_v2/time.proto b/exporters/jaeger-proto/src/main/proto/jaeger/api_v2/time.proto deleted file mode 100644 index b92529d7a56..00000000000 --- a/exporters/jaeger-proto/src/main/proto/jaeger/api_v2/time.proto +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -// Includes work from: - -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -syntax = "proto3"; - -package io.opentelemetry.internal; - -option java_package = "io.opentelemetry.exporter.jaeger.internal.protobuf"; -option java_outer_classname = "TimeProto"; -option java_multiple_files = true; - -// Copied from google.protobuf.Timestamp to provide access to the wire format. -message Time { - // Represents seconds of UTC time since Unix epoch - // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to - // 9999-12-31T23:59:59Z inclusive. - int64 seconds = 1; - - // Non-negative fractions of a second at nanosecond resolution. Negative - // second values with fractions must still have non-negative nanos values - // that count forward in time. Must be from 0 to 999,999,999 - // inclusive. 
- int32 nanos = 2; -} diff --git a/exporters/jaeger-thrift/build.gradle.kts b/exporters/jaeger-thrift/build.gradle.kts deleted file mode 100644 index 383b9d6943c..00000000000 --- a/exporters/jaeger-thrift/build.gradle.kts +++ /dev/null @@ -1,27 +0,0 @@ -plugins { - id("otel.java-conventions") - id("otel.publish-conventions") - - id("otel.animalsniffer-conventions") -} - -description = "OpenTelemetry - Jaeger Thrift Exporter" -otelJava.moduleName.set("io.opentelemetry.exporter.jaeger.thrift") - -dependencies { - api(project(":sdk:all")) - - implementation(project(":sdk:all")) - - implementation("com.fasterxml.jackson.jr:jackson-jr-objects") - implementation("io.jaegertracing:jaeger-client") { - exclude("com.google.code.gson", "gson") - } - - testImplementation("com.fasterxml.jackson.jr:jackson-jr-stree") - testImplementation("org.testcontainers:junit-jupiter") - testImplementation("com.squareup.okhttp3:okhttp") - testImplementation("com.google.guava:guava-testlib") - - testImplementation(project(":sdk:testing")) -} diff --git a/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/Adapter.java b/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/Adapter.java deleted file mode 100644 index cd3bf4ec26f..00000000000 --- a/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/Adapter.java +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger.thrift; - -import static io.opentelemetry.api.common.AttributeKey.booleanKey; - -import com.fasterxml.jackson.jr.ob.JSON; -import io.jaegertracing.thriftjava.Log; -import io.jaegertracing.thriftjava.Span; -import io.jaegertracing.thriftjava.SpanRef; -import io.jaegertracing.thriftjava.SpanRefType; -import io.jaegertracing.thriftjava.Tag; -import io.jaegertracing.thriftjava.TagType; -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.SpanKind; -import io.opentelemetry.api.trace.StatusCode; -import io.opentelemetry.sdk.trace.data.EventData; -import io.opentelemetry.sdk.trace.data.LinkData; -import io.opentelemetry.sdk.trace.data.SpanData; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Locale; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import javax.annotation.concurrent.ThreadSafe; - -/** Adapts OpenTelemetry objects to Jaeger objects. 
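The core of the mapping below is splitting OpenTelemetry's 128-bit hex trace id into Jaeger's traceIdHigh/traceIdLow pair of signed longs, and converting epoch-nanosecond timestamps to microseconds. A minimal standalone sketch of those two conversions, reusing one of the trace ids from the tests further down in this diff (class and variable names are illustrative):

```java
import java.math.BigInteger;
import java.util.concurrent.TimeUnit;

class JaegerIdMappingSketch {
  // Mirrors Adapter.traceIdAsLongHigh / traceIdAsLongLow defined later in this file.
  static long traceIdHigh(String traceIdHex) {
    return new BigInteger(traceIdHex.substring(0, 16), 16).longValue();
  }

  static long traceIdLow(String traceIdHex) {
    return new BigInteger(traceIdHex.substring(16, 32), 16).longValue();
  }

  public static void main(String[] args) {
    String traceId = "0000000000000000ff00000000abc123"; // TRACE_ID used in AdapterTest
    System.out.println(traceIdHigh(traceId));                     // 0
    System.out.println(Long.toHexString(traceIdLow(traceId)));    // ff00000000abc123
    // Jaeger start times and durations are microseconds, so nanos are divided down.
    System.out.println(TimeUnit.NANOSECONDS.toMicros(1_500_000)); // 1500
  }
}
```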
*/ -@ThreadSafe -final class Adapter { - - static final AttributeKey KEY_ERROR = booleanKey("error"); - static final String KEY_LOG_EVENT = "event"; - static final String KEY_EVENT_DROPPED_ATTRIBUTES_COUNT = "otel.event.dropped_attributes_count"; - static final String KEY_DROPPED_ATTRIBUTES_COUNT = "otel.dropped_attributes_count"; - static final String KEY_DROPPED_EVENTS_COUNT = "otel.dropped_events_count"; - static final String KEY_SPAN_KIND = "span.kind"; - static final String KEY_SPAN_STATUS_MESSAGE = "otel.status_message"; - static final String KEY_SPAN_STATUS_CODE = "otel.status_code"; - static final String KEY_INSTRUMENTATION_SCOPE_NAME = "otel.scope.name"; - static final String KEY_INSTRUMENTATION_SCOPE_VERSION = "otel.scope.version"; - static final String KEY_INSTRUMENTATION_LIBRARY_NAME = "otel.library.name"; - static final String KEY_INSTRUMENTATION_LIBRARY_VERSION = "otel.library.version"; - - private Adapter() {} - - /** - * Converts a list of {@link SpanData} into a collection of Jaeger's {@link Span}. - * - * @param spans the list of spans to be converted - * @return the collection of Jaeger spans - * @see #toJaeger(SpanData) - */ - static List toJaeger(Collection spans) { - return spans.stream().map(Adapter::toJaeger).collect(Collectors.toList()); - } - - /** - * Converts a single {@link SpanData} into a Jaeger's {@link Span}. - * - * @param span the span to be converted - * @return the Jaeger span - */ - static Span toJaeger(SpanData span) { - Span target = new Span(); - - long traceIdHigh = traceIdAsLongHigh(span.getTraceId()); - long traceIdLow = traceIdAsLongLow(span.getTraceId()); - - target.setTraceIdHigh(traceIdHigh); - target.setTraceIdLow(traceIdLow); - target.setSpanId(spanIdAsLong(span.getSpanId())); - target.setOperationName(span.getName()); - target.setStartTime(TimeUnit.NANOSECONDS.toMicros(span.getStartEpochNanos())); - target.setDuration( - TimeUnit.NANOSECONDS.toMicros(span.getEndEpochNanos() - span.getStartEpochNanos())); - - List tags = toTags(span.getAttributes()); - int droppedAttributes = span.getTotalAttributeCount() - span.getAttributes().size(); - if (droppedAttributes > 0) { - tags.add(new Tag(KEY_DROPPED_ATTRIBUTES_COUNT, TagType.LONG).setVLong(droppedAttributes)); - } - - target.setLogs(toJaegerLogs(span.getEvents())); - int droppedEvents = span.getTotalRecordedEvents() - span.getEvents().size(); - if (droppedEvents > 0) { - tags.add(new Tag(KEY_DROPPED_EVENTS_COUNT, TagType.LONG).setVLong(droppedEvents)); - } - - List references = toSpanRefs(span.getLinks()); - - // add the parent span - if (span.getParentSpanContext().isValid()) { - long parentSpanId = spanIdAsLong(span.getParentSpanId()); - references.add(new SpanRef(SpanRefType.CHILD_OF, traceIdLow, traceIdHigh, parentSpanId)); - target.setParentSpanId(parentSpanId); - } - target.setReferences(references); - - if (span.getKind() != SpanKind.INTERNAL) { - tags.add( - new Tag(KEY_SPAN_KIND, TagType.STRING) - .setVStr(span.getKind().name().toLowerCase(Locale.ROOT))); - } - - if (!span.getStatus().getDescription().isEmpty()) { - tags.add( - new Tag(KEY_SPAN_STATUS_MESSAGE, TagType.STRING) - .setVStr(span.getStatus().getDescription())); - } - - if (span.getStatus().getStatusCode() != StatusCode.UNSET) { - tags.add( - new Tag(KEY_SPAN_STATUS_CODE, TagType.STRING) - .setVStr(span.getStatus().getStatusCode().name())); - } - - tags.add( - new Tag(KEY_INSTRUMENTATION_SCOPE_NAME, TagType.STRING) - .setVStr(span.getInstrumentationScopeInfo().getName())); - // Include instrumentation library name for 
backwards compatibility - tags.add( - new Tag(KEY_INSTRUMENTATION_LIBRARY_NAME, TagType.STRING) - .setVStr(span.getInstrumentationScopeInfo().getName())); - - if (span.getInstrumentationScopeInfo().getVersion() != null) { - tags.add( - new Tag(KEY_INSTRUMENTATION_SCOPE_VERSION, TagType.STRING) - .setVStr(span.getInstrumentationScopeInfo().getVersion())); - // Include instrumentation library name for backwards compatibility - tags.add( - new Tag(KEY_INSTRUMENTATION_LIBRARY_VERSION, TagType.STRING) - .setVStr(span.getInstrumentationScopeInfo().getVersion())); - } - - if (span.getStatus().getStatusCode() == StatusCode.ERROR) { - tags.add(toTag(KEY_ERROR, true)); - } - target.setTags(tags); - - return target; - } - - /** - * Converts {@link EventData}s into a collection of Jaeger's {@link Log}. - * - * @param timedEvents the timed events to be converted - * @return a collection of Jaeger logs - * @see #toJaegerLog(EventData) - */ - // VisibleForTesting - static List toJaegerLogs(List timedEvents) { - return timedEvents.stream().map(Adapter::toJaegerLog).collect(Collectors.toList()); - } - - /** - * Converts a {@link EventData} into Jaeger's {@link Log}. - * - * @param event the timed event to be converted - * @return a Jaeger log - */ - // VisibleForTesting - static Log toJaegerLog(EventData event) { - Log result = new Log(); - result.setTimestamp(TimeUnit.NANOSECONDS.toMicros(event.getEpochNanos())); - result.addToFields(new Tag(KEY_LOG_EVENT, TagType.STRING).setVStr(event.getName())); - - int droppedAttributesCount = event.getDroppedAttributesCount(); - if (droppedAttributesCount > 0) { - result.addToFields( - new Tag(KEY_EVENT_DROPPED_ATTRIBUTES_COUNT, TagType.LONG) - .setVLong(droppedAttributesCount)); - } - List attributeTags = toTags(event.getAttributes()); - for (Tag attributeTag : attributeTags) { - result.addToFields(attributeTag); - } - return result; - } - - /** - * Converts a map of attributes into a collection of Jaeger's {@link Tag}. - * - * @param attributes the span attributes - * @return a collection of Jaeger key values - * @see #toTag - */ - static List toTags(Attributes attributes) { - List results = new ArrayList<>(); - attributes.forEach((key, value) -> results.add(toTag(key, value))); - return results; - } - - /** - * Converts the given {@link AttributeKey} and value into Jaeger's {@link Tag}. - * - * @param key the entry key as string - * @param value the entry value - * @return a Jaeger key value - */ - // VisibleForTesting - static Tag toTag(AttributeKey key, Object value) { - switch (key.getType()) { - case STRING: - return new Tag(key.getKey(), TagType.STRING).setVStr((String) value); - case LONG: - return new Tag(key.getKey(), TagType.LONG).setVLong((long) value); - case BOOLEAN: - return new Tag(key.getKey(), TagType.BOOL).setVBool((boolean) value); - case DOUBLE: - return new Tag(key.getKey(), TagType.DOUBLE).setVDouble((double) value); - default: - try { - return new Tag(key.getKey(), TagType.STRING).setVStr(JSON.std.asString(value)); - } catch (IOException e) { - // Can't have an exception serializing a plain Java object to a String. Add an exception - // mostly to satisfy the compiler. - throw new UncheckedIOException( - "Error serializing a plain Java object to String. " - + "This is a bug in the OpenTelemetry library.", - e); - } - } - } - - /** - * Converts {@link LinkData}s into a collection of Jaeger's {@link SpanRef}. 
- * - * @param links the span's links property to be converted - * @return a collection of Jaeger span references - */ - // VisibleForTesting - static List toSpanRefs(List links) { - List spanRefs = new ArrayList<>(links.size()); - for (LinkData link : links) { - spanRefs.add(toSpanRef(link)); - } - return spanRefs; - } - - /** - * Converts a single {@link LinkData} into a Jaeger's {@link SpanRef}. - * - * @param link the OpenTelemetry link to be converted - * @return the Jaeger span reference - */ - // VisibleForTesting - static SpanRef toSpanRef(LinkData link) { - // we can assume that all links are *follows from* - // https://github.com/open-telemetry/opentelemetry-java/issues/475 - // https://github.com/open-telemetry/opentelemetry-java/pull/481/files#r312577862 - return new SpanRef( - SpanRefType.FOLLOWS_FROM, - traceIdAsLongLow(link.getSpanContext().getTraceId()), - traceIdAsLongHigh(link.getSpanContext().getTraceId()), - spanIdAsLong(link.getSpanContext().getSpanId())); - } - - private static long traceIdAsLongHigh(String traceId) { - return new BigInteger(traceId.substring(0, 16), 16).longValue(); - } - - private static long traceIdAsLongLow(String traceId) { - return new BigInteger(traceId.substring(16, 32), 16).longValue(); - } - - private static long spanIdAsLong(String spanId) { - return new BigInteger(spanId, 16).longValue(); - } -} diff --git a/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftSpanExporter.java b/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftSpanExporter.java deleted file mode 100644 index bfab4122b05..00000000000 --- a/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftSpanExporter.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger.thrift; - -import io.jaegertracing.internal.exceptions.SenderException; -import io.jaegertracing.thrift.internal.senders.ThriftSender; -import io.jaegertracing.thriftjava.Process; -import io.jaegertracing.thriftjava.Span; -import io.jaegertracing.thriftjava.Tag; -import io.jaegertracing.thriftjava.TagType; -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.internal.ThrottlingLogger; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.sdk.trace.export.SpanExporter; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.logging.Level; -import java.util.logging.Logger; -import java.util.stream.Collectors; -import javax.annotation.concurrent.ThreadSafe; - -/** - * Exports spans to Jaeger via Thrift, using Jaeger's thrift model. - * - * @deprecated Use {@code OtlpGrpcSpanExporter} or {@code OtlpHttpSpanExporter} from opentelemetry-exporter-otlp - * instead. 
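Because the deprecation notice points at the OTLP exporters, the usual replacement is to wire an OtlpGrpcSpanExporter into the SDK. A hedged migration sketch; the endpoint is the conventional OTLP gRPC default and is an assumption, not something configured anywhere in this diff:

```java
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.export.BatchSpanProcessor;

class OtlpMigrationSketch {
  static OpenTelemetrySdk initOpenTelemetry() {
    // Stands in for JaegerThriftSpanExporter.builder().setEndpoint("http://localhost:14268/api/traces")
    OtlpGrpcSpanExporter exporter =
        OtlpGrpcSpanExporter.builder()
            .setEndpoint("http://localhost:4317") // assumed collector endpoint (OTLP gRPC default)
            .build();
    return OpenTelemetrySdk.builder()
        .setTracerProvider(
            SdkTracerProvider.builder()
                .addSpanProcessor(BatchSpanProcessor.builder(exporter).build())
                .build())
        .build();
  }
}
```

Unlike the exporter below, which sends each export call as-is, this pairs the exporter with a BatchSpanProcessor so batching happens in the SDK.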
- */ -@ThreadSafe -@Deprecated -public final class JaegerThriftSpanExporter implements SpanExporter { - - private static final AttributeKey SERVICE_NAME = AttributeKey.stringKey("service.name"); - - static final String DEFAULT_ENDPOINT = "http://localhost:14268/api/traces"; - - private static final String DEFAULT_HOST_NAME = "unknown"; - private static final String CLIENT_VERSION_KEY = "jaeger.version"; - private static final String CLIENT_VERSION_VALUE = "opentelemetry-java"; - private static final String HOSTNAME_KEY = "hostname"; - private static final String IP_KEY = "ip"; - private static final String IP_DEFAULT = "0.0.0.0"; - - private final ThrottlingLogger logger = - new ThrottlingLogger(Logger.getLogger(JaegerThriftSpanExporter.class.getName())); - private final AtomicBoolean isShutdown = new AtomicBoolean(); - private final ThriftSender thriftSender; - private final Process process; - - /** - * Creates a new Jaeger gRPC Span Reporter with the given name, using the given channel. - * - * @param thriftSender The sender used for sending the data. - */ - JaegerThriftSpanExporter(ThriftSender thriftSender) { - this.thriftSender = thriftSender; - String hostname; - String ipv4; - - try { - hostname = InetAddress.getLocalHost().getHostName(); - ipv4 = InetAddress.getLocalHost().getHostAddress(); - } catch (UnknownHostException e) { - hostname = DEFAULT_HOST_NAME; - ipv4 = IP_DEFAULT; - } - - Tag clientTag = new Tag(CLIENT_VERSION_KEY, TagType.STRING).setVStr(CLIENT_VERSION_VALUE); - Tag ipv4Tag = new Tag(IP_KEY, TagType.STRING).setVStr(ipv4); - Tag hostnameTag = new Tag(HOSTNAME_KEY, TagType.STRING).setVStr(hostname); - - this.process = new Process(); - this.process.addToTags(clientTag); - this.process.addToTags(ipv4Tag); - this.process.addToTags(hostnameTag); - } - - /** - * Submits all the given spans in a single batch to the Jaeger collector. - * - * @param spans the list of sampled Spans to be exported. - * @return the result of the operation - */ - @Override - public CompletableResultCode export(Collection spans) { - if (isShutdown.get()) { - return CompletableResultCode.ofFailure(); - } - - Map> batches = - spans.stream().collect(Collectors.groupingBy(SpanData::getResource)).entrySet().stream() - .collect( - Collectors.toMap( - entry -> createProcess(entry.getKey()), - entry -> Adapter.toJaeger(entry.getValue()))); - List batchResults = new ArrayList<>(batches.size()); - batches.forEach( - (process, jaegerSpans) -> { - CompletableResultCode batchResult = new CompletableResultCode(); - batchResults.add(batchResult); - try { - // todo: consider making truly async - thriftSender.send(process, jaegerSpans); - batchResult.succeed(); - } catch (SenderException e) { - logger.log(Level.WARNING, "Failed to export spans", e); - batchResult.fail(); - } - }); - return CompletableResultCode.ofAll(batchResults); - } - - private Process createProcess(Resource resource) { - Process result = new Process(this.process); - - String serviceName = resource.getAttribute(SERVICE_NAME); - if (serviceName == null || serviceName.isEmpty()) { - serviceName = Resource.getDefault().getAttribute(SERVICE_NAME); - } - // In practice should never be null unless the default Resource spec is changed. - if (serviceName != null) { - result.setServiceName(serviceName); - } - - List tags = Adapter.toTags(resource.getAttributes()); - tags.forEach(result::addToTags); - return result; - } - - /** - * The Jaeger exporter does not batch spans, so this method will immediately return with success. 
- * - * @return always Success - */ - @Override - public CompletableResultCode flush() { - return CompletableResultCode.ofSuccess(); - } - - /** - * Returns a new builder instance for this exporter. - * - * @return a new builder instance for this exporter. - */ - public static JaegerThriftSpanExporterBuilder builder() { - return new JaegerThriftSpanExporterBuilder(); - } - - /** - * Initiates an orderly shutdown in which preexisting calls continue but new calls are immediately - * cancelled. - */ - @Override - public CompletableResultCode shutdown() { - if (!isShutdown.compareAndSet(false, true)) { - logger.log(Level.INFO, "Calling shutdown() multiple times."); - } - return CompletableResultCode.ofSuccess(); - } -} diff --git a/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftSpanExporterBuilder.java b/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftSpanExporterBuilder.java deleted file mode 100644 index faa88e53a21..00000000000 --- a/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftSpanExporterBuilder.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger.thrift; - -import io.jaegertracing.thrift.internal.senders.HttpSender; -import io.jaegertracing.thrift.internal.senders.ThriftSender; -import javax.annotation.Nullable; -import org.apache.thrift.transport.TTransportException; - -/** - * Builder utility for this exporter. - * - * @deprecated Use {@code OtlpGrpcSpanExporter} or {@code OtlpHttpSpanExporter} from opentelemetry-exporter-otlp - * instead. - */ -@Deprecated -public final class JaegerThriftSpanExporterBuilder { - - private String endpoint = JaegerThriftSpanExporter.DEFAULT_ENDPOINT; - @Nullable private ThriftSender thriftSender; - - /** - * Explicitly set the {@link ThriftSender} instance to use for this Exporter. Will override any - * endpoint that has been set. - * - * @param thriftSender The ThriftSender to use. - * @return this. - */ - public JaegerThriftSpanExporterBuilder setThriftSender(ThriftSender thriftSender) { - this.thriftSender = thriftSender; - return this; - } - - /** - * Sets the Jaeger endpoint to connect to. Needs to include the full API path for trace ingest. - * - *

Optional, defaults to "http://localhost:14268/api/traces". - * - * @param endpoint The Jaeger endpoint URL, ex. "https://jaegerhost:14268/api/traces". - * @return this. - */ - public JaegerThriftSpanExporterBuilder setEndpoint(String endpoint) { - this.endpoint = endpoint; - return this; - } - - /** - * Constructs a new instance of the exporter based on the builder's values. - * - * @return a new exporter's instance. - */ - public JaegerThriftSpanExporter build() { - ThriftSender thriftSender = this.thriftSender; - if (thriftSender == null) { - try { - thriftSender = new HttpSender.Builder(endpoint).build(); - } catch (TTransportException e) { - throw new IllegalStateException("Failed to construct a thrift HttpSender.", e); - } - } - return new JaegerThriftSpanExporter(thriftSender); - } - - JaegerThriftSpanExporterBuilder() {} -} diff --git a/exporters/jaeger-thrift/src/test/java/io/opentelemetry/exporter/jaeger/thrift/AdapterTest.java b/exporters/jaeger-thrift/src/test/java/io/opentelemetry/exporter/jaeger/thrift/AdapterTest.java deleted file mode 100644 index a084b671a36..00000000000 --- a/exporters/jaeger-thrift/src/test/java/io/opentelemetry/exporter/jaeger/thrift/AdapterTest.java +++ /dev/null @@ -1,363 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger.thrift; - -import static io.opentelemetry.api.common.AttributeKey.booleanArrayKey; -import static io.opentelemetry.api.common.AttributeKey.booleanKey; -import static io.opentelemetry.api.common.AttributeKey.doubleArrayKey; -import static io.opentelemetry.api.common.AttributeKey.doubleKey; -import static io.opentelemetry.api.common.AttributeKey.longArrayKey; -import static io.opentelemetry.api.common.AttributeKey.longKey; -import static io.opentelemetry.api.common.AttributeKey.stringArrayKey; -import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static java.util.concurrent.TimeUnit.MILLISECONDS; -import static org.assertj.core.api.Assertions.assertThat; - -import com.google.common.io.BaseEncoding; -import io.jaegertracing.thriftjava.Log; -import io.jaegertracing.thriftjava.SpanRef; -import io.jaegertracing.thriftjava.SpanRefType; -import io.jaegertracing.thriftjava.Tag; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.SpanKind; -import io.opentelemetry.api.trace.StatusCode; -import io.opentelemetry.api.trace.TraceFlags; -import io.opentelemetry.api.trace.TraceState; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.trace.TestSpanData; -import io.opentelemetry.sdk.trace.data.EventData; -import io.opentelemetry.sdk.trace.data.LinkData; -import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.sdk.trace.data.StatusData; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import javax.annotation.Nullable; -import org.junit.jupiter.api.Test; - -/** Unit tests for {@link Adapter}. 
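The tests below build spans via getSpanData(startMs, endMs, SpanKind.SERVER, 2, 4), i.e. two total recorded events and four total attributes, while only one event and one attribute are actually attached. The dropped-count tags asserted later fall out of that difference; a tiny sketch of the arithmetic (variable names are illustrative):

```java
class DroppedCountSketch {
  public static void main(String[] args) {
    int totalAttributeCount = 4; // getSpanData(..., 2, 4)
    int recordedAttributes = 1;  // only the valueB attribute is set
    int totalRecordedEvents = 2;
    int recordedEvents = 1;      // a single timed event is attached

    // Values reported as otel.dropped_attributes_count and otel.dropped_events_count
    System.out.println(totalAttributeCount - recordedAttributes); // 3
    System.out.println(totalRecordedEvents - recordedEvents);     // 1
  }
}
```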
*/ -class AdapterTest { - private static final BaseEncoding hex = BaseEncoding.base16().lowerCase(); - private static final String LINK_TRACE_ID = "ff000000000000000000000000cba123"; - private static final String LINK_SPAN_ID = "0000000000fed456"; - private static final String TRACE_ID = "0000000000000000ff00000000abc123"; - private static final String SPAN_ID = "ff00000000def456"; - private static final String PARENT_SPAN_ID = "0000000000aef789"; - - @Test - void testThriftSpans() { - long duration = 900; // ms - long startMs = System.currentTimeMillis(); - long endMs = startMs + duration; - - SpanData span = getSpanData(startMs, endMs, SpanKind.SERVER); - List spans = Collections.singletonList(span); - - List jaegerSpans = Adapter.toJaeger(spans); - - // the span contents are checked somewhere else - assertThat(jaegerSpans).hasSize(1); - } - - @Test - void testThriftSpan() { - long duration = 900; // ms - long startMs = System.currentTimeMillis(); - long endMs = startMs + duration; - - SpanData span = getSpanData(startMs, endMs, SpanKind.SERVER, 2, 4); - - // test - io.jaegertracing.thriftjava.Span jaegerSpan = Adapter.toJaeger(span); - - String rebuildTraceId = - traceIdFromLongs(jaegerSpan.getTraceIdHigh(), jaegerSpan.getTraceIdLow()); - assertThat(rebuildTraceId).isEqualTo(span.getTraceId()); - assertThat(spanIdFromLong(jaegerSpan.getSpanId())).isEqualTo(span.getSpanId()); - assertThat(jaegerSpan.getOperationName()).isEqualTo("GET /api/endpoint"); - assertThat(jaegerSpan.getStartTime()).isEqualTo(MILLISECONDS.toMicros(startMs)); - assertThat(jaegerSpan.getDuration()).isEqualTo(MILLISECONDS.toMicros(duration)); - - assertThat(jaegerSpan.getTagsSize()).isEqualTo(8); - assertThat(getValue(jaegerSpan.getTags(), Adapter.KEY_SPAN_KIND).getVStr()).isEqualTo("server"); - assertThat(getValue(jaegerSpan.getTags(), Adapter.KEY_SPAN_STATUS_CODE).getVLong()) - .isEqualTo(0); - assertThat(getValue(jaegerSpan.getTags(), Adapter.KEY_SPAN_STATUS_MESSAGE).getVStr()) - .isEqualTo("ok!"); - assertThat(getValue(jaegerSpan.getTags(), Adapter.KEY_DROPPED_EVENTS_COUNT).getVLong()) - .isEqualTo(1); - assertThat(getValue(jaegerSpan.getTags(), Adapter.KEY_DROPPED_ATTRIBUTES_COUNT).getVLong()) - .isEqualTo(3); - - assertThat(jaegerSpan.getLogsSize()).isEqualTo(1); - Log log = jaegerSpan.getLogs().get(0); - assertThat(getValue(log.getFields(), Adapter.KEY_LOG_EVENT).getVStr()) - .isEqualTo("the log message"); - assertThat(getValue(log.getFields(), "foo").getVStr()).isEqualTo("bar"); - - assertThat(jaegerSpan.getReferencesSize()).isEqualTo(2); - - assertHasFollowsFrom(jaegerSpan); - assertHasParent(jaegerSpan); - } - - @Test - void testThriftSpan_internal() { - long duration = 900; // ms - long startMs = System.currentTimeMillis(); - long endMs = startMs + duration; - - SpanData span = getSpanData(startMs, endMs, SpanKind.INTERNAL); - - // test - io.jaegertracing.thriftjava.Span jaegerSpan = Adapter.toJaeger(span); - - assertThat(jaegerSpan.getTagsSize()).isEqualTo(5); - assertThat(getValue(jaegerSpan.getTags(), Adapter.KEY_SPAN_KIND)).isNull(); - } - - @Test - void testJaegerLogs() { - // prepare - EventData eventsData = getTimedEvent(); - - // test - Collection logs = Adapter.toJaegerLogs(Collections.singletonList(eventsData)); - - // verify - assertThat(logs).hasSize(1); - } - - @Test - void testJaegerLog() { - // prepare - EventData event = getTimedEvent(); - - // test - Log log = Adapter.toJaegerLog(event); - - // verify - assertThat(log.getFieldsSize()).isEqualTo(2); - - assertThat(getValue(log.getFields(), 
Adapter.KEY_LOG_EVENT).getVStr()) - .isEqualTo("the log message"); - assertThat(getValue(log.getFields(), "foo").getVStr()).isEqualTo("bar"); - assertThat(getValue(log.getFields(), Adapter.KEY_EVENT_DROPPED_ATTRIBUTES_COUNT)).isNull(); - } - - @Test - void jaegerLog_droppedAttributes() { - EventData event = getTimedEvent(3); - - // test - Log log = Adapter.toJaegerLog(event); - - // verify - assertThat(getValue(log.getFields(), Adapter.KEY_EVENT_DROPPED_ATTRIBUTES_COUNT).getVLong()) - .isEqualTo(2); - } - - @Test - void testKeyValue() { - // test - Tag kvB = Adapter.toTag(booleanKey("valueB"), true); - Tag kvD = Adapter.toTag(doubleKey("valueD"), 1.); - Tag kvI = Adapter.toTag(longKey("valueI"), 2L); - Tag kvS = Adapter.toTag(stringKey("valueS"), "foobar"); - Tag kvArrayB = Adapter.toTag(booleanArrayKey("valueArrayB"), Arrays.asList(true, false)); - Tag kvArrayD = Adapter.toTag(doubleArrayKey("valueArrayD"), Arrays.asList(1.2345, 6.789)); - Tag kvArrayI = Adapter.toTag(longArrayKey("valueArrayI"), Arrays.asList(12345L, 67890L)); - Tag kvArrayS = Adapter.toTag(stringArrayKey("valueArrayS"), Arrays.asList("foobar", "barfoo")); - - // verify - assertThat(kvB.isVBool()).isTrue(); - - assertThat(kvD.getVDouble()).isEqualTo(1); - assertThat(kvI.getVLong()).isEqualTo(2); - assertThat(kvS.getVStr()).isEqualTo("foobar"); - assertThat(kvArrayB.getVStr()).isEqualTo("[true,false]"); - assertThat(kvArrayD.getVStr()).isEqualTo("[1.2345,6.789]"); - assertThat(kvArrayI.getVStr()).isEqualTo("[12345,67890]"); - assertThat(kvArrayS.getVStr()).isEqualTo("[\"foobar\",\"barfoo\"]"); - } - - @Test - void testSpanRefs() { - // prepare - LinkData link = - LinkData.create(createSpanContext("00000000000000000000000000cba123", "0000000000fed456")); - - // test - Collection spanRefs = Adapter.toSpanRefs(Collections.singletonList(link)); - - // verify - assertThat(spanRefs).hasSize(1); // the actual span ref is tested in another test - } - - @Test - void testSpanRef() { - // prepare - LinkData link = LinkData.create(createSpanContext(TRACE_ID, SPAN_ID)); - - // test - SpanRef spanRef = Adapter.toSpanRef(link); - - // verify - assertThat(spanIdFromLong(spanRef.getSpanId())).isEqualTo(SPAN_ID); - assertThat(traceIdFromLongs(spanRef.getTraceIdHigh(), spanRef.getTraceIdLow())) - .isEqualTo(TRACE_ID); - assertThat(spanRef.getRefType()).isEqualTo(SpanRefType.FOLLOWS_FROM); - } - - @Test - void testStatusNotUnset() { - long startMs = System.currentTimeMillis(); - long endMs = startMs + 900; - SpanData span = - TestSpanData.builder() - .setHasEnded(true) - .setSpanContext(createSpanContext(TRACE_ID, SPAN_ID)) - .setName("GET /api/endpoint") - .setStartEpochNanos(MILLISECONDS.toNanos(startMs)) - .setEndEpochNanos(MILLISECONDS.toNanos(endMs)) - .setKind(SpanKind.SERVER) - .setStatus(StatusData.error()) - .setTotalRecordedEvents(0) - .setTotalRecordedLinks(0) - .build(); - - assertThat(Adapter.toJaeger(span)).isNotNull(); - } - - @Test - void testSpanError() { - Attributes attributes = - Attributes.of( - stringKey("error.type"), - this.getClass().getName(), - stringKey("error.message"), - "server error"); - long startMs = System.currentTimeMillis(); - long endMs = startMs + 900; - SpanData span = - TestSpanData.builder() - .setHasEnded(true) - .setSpanContext(createSpanContext(TRACE_ID, SPAN_ID)) - .setName("GET /api/endpoint") - .setStartEpochNanos(MILLISECONDS.toNanos(startMs)) - .setEndEpochNanos(MILLISECONDS.toNanos(endMs)) - .setKind(SpanKind.SERVER) - .setStatus(StatusData.error()) - .setAttributes(attributes) - 
.setTotalRecordedEvents(0) - .setTotalRecordedLinks(0) - .build(); - - io.jaegertracing.thriftjava.Span jaegerSpan = Adapter.toJaeger(span); - assertThat(getValue(jaegerSpan.getTags(), "error.type").getVStr()) - .isEqualTo(this.getClass().getName()); - assertThat(getValue(jaegerSpan.getTags(), "error").isVBool()).isTrue(); - } - - private static EventData getTimedEvent() { - return getTimedEvent(-1); - } - - private static EventData getTimedEvent(int totalAttributeCount) { - long epochNanos = MILLISECONDS.toNanos(System.currentTimeMillis()); - Attributes attributes = Attributes.of(stringKey("foo"), "bar"); - if (totalAttributeCount <= 0) { - totalAttributeCount = attributes.size(); - } - return EventData.create(epochNanos, "the log message", attributes, totalAttributeCount); - } - - private static SpanData getSpanData(long startMs, long endMs, SpanKind kind) { - return getSpanData(startMs, endMs, kind, 1, 1); - } - - private static SpanData getSpanData( - long startMs, long endMs, SpanKind kind, int totalRecordedEvents, int totalAttributeCount) { - Attributes attributes = Attributes.of(booleanKey("valueB"), true); - - LinkData link = LinkData.create(createSpanContext(LINK_TRACE_ID, LINK_SPAN_ID), attributes); - - return TestSpanData.builder() - .setHasEnded(true) - .setSpanContext(createSpanContext(TRACE_ID, SPAN_ID)) - .setParentSpanContext( - SpanContext.create( - TRACE_ID, PARENT_SPAN_ID, TraceFlags.getDefault(), TraceState.getDefault())) - .setName("GET /api/endpoint") - .setStartEpochNanos(MILLISECONDS.toNanos(startMs)) - .setEndEpochNanos(MILLISECONDS.toNanos(endMs)) - .setAttributes(Attributes.of(booleanKey("valueB"), true)) - .setTotalAttributeCount(totalAttributeCount) - .setEvents(Collections.singletonList(getTimedEvent())) - .setTotalRecordedEvents(totalRecordedEvents) - .setLinks(Collections.singletonList(link)) - .setTotalRecordedLinks(1) - .setKind(kind) - .setResource(Resource.create(Attributes.empty())) - .setStatus(StatusData.create(StatusCode.OK, "ok!")) - .build(); - } - - private static SpanContext createSpanContext(String traceId, String spanId) { - return SpanContext.create(traceId, spanId, TraceFlags.getDefault(), TraceState.getDefault()); - } - - @Nullable - private static Tag getValue(List tagsList, String s) { - for (Tag kv : tagsList) { - if (kv.getKey().equals(s)) { - return kv; - } - } - return null; - } - - private static void assertHasFollowsFrom(io.jaegertracing.thriftjava.Span jaegerSpan) { - boolean found = false; - for (SpanRef spanRef : jaegerSpan.getReferences()) { - - if (SpanRefType.FOLLOWS_FROM.equals(spanRef.getRefType())) { - assertThat(traceIdFromLongs(spanRef.getTraceIdHigh(), spanRef.getTraceIdLow())) - .isEqualTo(LINK_TRACE_ID); - assertThat(spanIdFromLong(spanRef.getSpanId())).isEqualTo(LINK_SPAN_ID); - found = true; - } - } - assertThat(found).isTrue(); - } - - private static void assertHasParent(io.jaegertracing.thriftjava.Span jaegerSpan) { - boolean found = false; - for (SpanRef spanRef : jaegerSpan.getReferences()) { - if (SpanRefType.CHILD_OF.equals(spanRef.getRefType())) { - assertThat(traceIdFromLongs(spanRef.getTraceIdHigh(), spanRef.getTraceIdLow())) - .isEqualTo(TRACE_ID); - assertThat(spanIdFromLong(spanRef.getSpanId())).isEqualTo(PARENT_SPAN_ID); - found = true; - } - } - assertThat(found).isTrue(); - assertThat(spanIdFromLong(jaegerSpan.getParentSpanId())).isEqualTo(PARENT_SPAN_ID); - } - - private static String traceIdFromLongs(long high, long low) { - return hex.encode( - 
ByteBuffer.allocate(16).order(ByteOrder.BIG_ENDIAN).putLong(high).putLong(low).array()); - } - - private static String spanIdFromLong(long id) { - return hex.encode(ByteBuffer.allocate(8).order(ByteOrder.BIG_ENDIAN).putLong(id).array()); - } -} diff --git a/exporters/jaeger-thrift/src/test/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftIntegrationTest.java b/exporters/jaeger-thrift/src/test/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftIntegrationTest.java deleted file mode 100644 index 6aa33e2efc1..00000000000 --- a/exporters/jaeger-thrift/src/test/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftIntegrationTest.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger.thrift; - -import static io.opentelemetry.api.common.AttributeKey.stringKey; - -import com.fasterxml.jackson.core.TreeNode; -import com.fasterxml.jackson.jr.ob.JSON; -import com.fasterxml.jackson.jr.stree.JacksonJrsTreeCodec; -import io.jaegertracing.thrift.internal.senders.UdpSender; -import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.sdk.OpenTelemetrySdk; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.SdkTracerProvider; -import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; -import java.time.Duration; -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.Response; -import org.apache.thrift.transport.TTransportException; -import org.awaitility.Awaitility; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.output.Slf4jLogConsumer; -import org.testcontainers.containers.wait.strategy.Wait; -import org.testcontainers.images.PullPolicy; -import org.testcontainers.junit.jupiter.Container; -import org.testcontainers.junit.jupiter.Testcontainers; - -@Testcontainers(disabledWithoutDocker = true) -@SuppressWarnings("deprecation") // Testing deprecated code -class JaegerThriftIntegrationTest { - private static final OkHttpClient client = new OkHttpClient(); - - private static final int QUERY_PORT = 16686; - private static final int THRIFT_HTTP_PORT = 14268; - - private static final int THRIFT_UDP_PORT = 6831; - private static final int HEALTH_PORT = 14269; - private static final String SERVICE_NAME = "E2E-test"; - private static final String JAEGER_URL = "http://localhost"; - - @Container - public static GenericContainer jaegerContainer = - new GenericContainer<>("ghcr.io/open-telemetry/opentelemetry-java/jaeger:1.32") - .withImagePullPolicy(PullPolicy.alwaysPull()) - .withExposedPorts(THRIFT_HTTP_PORT, THRIFT_UDP_PORT, QUERY_PORT, HEALTH_PORT) - .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("jaeger"))) - .waitingFor(Wait.forHttp("/").forPort(HEALTH_PORT)); - - @ParameterizedTest - @ValueSource(booleans = {false, true}) - void testJaegerIntegration(boolean udp) { - OpenTelemetry openTelemetry = initOpenTelemetry(udp); - imitateWork(openTelemetry); - Awaitility.await() - .atMost(Duration.ofSeconds(30)) - .until(JaegerThriftIntegrationTest::assertJaegerHasATrace); - } - - private static OpenTelemetry initOpenTelemetry(boolean udp) { - JaegerThriftSpanExporterBuilder jaegerExporter = JaegerThriftSpanExporter.builder(); - - if (udp) { - int mappedPort = 
jaegerContainer.getMappedPort(THRIFT_UDP_PORT); - try { - jaegerExporter.setThriftSender(new UdpSender("localhost", mappedPort, 0)); - } catch (TTransportException e) { - throw new IllegalStateException(e); - } - } else { - int mappedPort = jaegerContainer.getMappedPort(THRIFT_HTTP_PORT); - jaegerExporter.setEndpoint(JAEGER_URL + ":" + mappedPort + "/api/traces"); - } - - return OpenTelemetrySdk.builder() - .setTracerProvider( - SdkTracerProvider.builder() - .addSpanProcessor(SimpleSpanProcessor.create(jaegerExporter.build())) - .setResource( - Resource.getDefault().toBuilder() - .put(stringKey("service.name"), SERVICE_NAME) - .build()) - .build()) - .build(); - } - - private void imitateWork(OpenTelemetry openTelemetry) { - Span span = - openTelemetry.getTracer(getClass().getCanonicalName()).spanBuilder("Test span").startSpan(); - span.addEvent("some event"); - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - span.end(); - } - - private static boolean assertJaegerHasATrace() { - try { - Integer mappedPort = jaegerContainer.getMappedPort(QUERY_PORT); - String url = - String.format( - "%s/api/traces?service=%s", - String.format(JAEGER_URL + ":%d", mappedPort), SERVICE_NAME); - - Request request = - new Request.Builder() - .url(url) - .header("Content-Type", "application/json") - .header("Accept", "application/json") - .build(); - - TreeNode json; - try (Response response = client.newCall(request).execute()) { - json = - JSON.builder() - .treeCodec(new JacksonJrsTreeCodec()) - .build() - .treeFrom(response.body().byteStream()); - } - - return json.get("data").get(0).get("traceID") != null; - } catch (Exception e) { - return false; - } - } -} diff --git a/exporters/jaeger-thrift/src/test/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftSpanExporterTest.java b/exporters/jaeger-thrift/src/test/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftSpanExporterTest.java deleted file mode 100644 index f90ceddbad7..00000000000 --- a/exporters/jaeger-thrift/src/test/java/io/opentelemetry/exporter/jaeger/thrift/JaegerThriftSpanExporterTest.java +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger.thrift; - -import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.verify; - -import io.github.netmikey.logunit.api.LogCapturer; -import io.jaegertracing.internal.exceptions.SenderException; -import io.jaegertracing.thrift.internal.senders.ThriftSender; -import io.jaegertracing.thriftjava.Process; -import io.jaegertracing.thriftjava.Span; -import io.jaegertracing.thriftjava.SpanRef; -import io.jaegertracing.thriftjava.SpanRefType; -import io.jaegertracing.thriftjava.Tag; -import io.jaegertracing.thriftjava.TagType; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.SpanKind; -import io.opentelemetry.api.trace.TraceFlags; -import io.opentelemetry.api.trace.TraceState; -import io.opentelemetry.internal.testing.slf4j.SuppressLogger; -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.trace.TestSpanData; -import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.sdk.trace.data.StatusData; 
-import java.net.InetAddress; -import java.net.UnknownHostException; -import java.time.Duration; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.TimeUnit; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.api.extension.RegisterExtension; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -@SuppressWarnings("deprecation") // Testing deprecated code -class JaegerThriftSpanExporterTest { - - private static final String TRACE_ID = "a0000000000000000000000000abc123"; - private static final long TRACE_ID_HIGH = 0xa000000000000000L; - private static final long TRACE_ID_LOW = 0x0000000000abc123L; - private static final String SPAN_ID = "00000f0000def456"; - private static final long SPAN_ID_LONG = 0x00000f0000def456L; - private static final String SPAN_ID_2 = "00a0000000aef789"; - private static final long SPAN_ID_2_LONG = 0x00a0000000aef789L; - private static final SpanContext SPAN_CONTEXT = - SpanContext.create(TRACE_ID, SPAN_ID, TraceFlags.getDefault(), TraceState.getDefault()); - private static final SpanContext SPAN_CONTEXT_2 = - SpanContext.create(TRACE_ID, SPAN_ID_2, TraceFlags.getDefault(), TraceState.getDefault()); - private static final Duration DURATION = Duration.ofMillis(900); - - @RegisterExtension - LogCapturer logs = LogCapturer.create().captureForType(JaegerThriftSpanExporter.class); - - private JaegerThriftSpanExporter exporter; - @Mock private ThriftSender thriftSender; - - @BeforeEach - void beforeEach() { - exporter = JaegerThriftSpanExporter.builder().setThriftSender(thriftSender).build(); - } - - @Test - void testExport() throws SenderException, UnknownHostException { - SpanData span = - testSpanData( - Resource.create( - Attributes.of( - stringKey("service.name"), - "myServiceName", - stringKey("resource-attr-key"), - "resource-attr-value")), - "GET /api/endpoint", - SPAN_CONTEXT, - SPAN_CONTEXT_2); - - // test - CompletableResultCode result = exporter.export(Collections.singletonList(span)); - result.join(1, TimeUnit.SECONDS); - assertThat(result.isSuccess()).isEqualTo(true); - - // verify - Process expectedProcess = new Process("myServiceName"); - expectedProcess.addToTags( - new Tag("jaeger.version", TagType.STRING).setVStr("opentelemetry-java")); - expectedProcess.addToTags( - new Tag("ip", TagType.STRING).setVStr(InetAddress.getLocalHost().getHostAddress())); - expectedProcess.addToTags( - new Tag("hostname", TagType.STRING).setVStr(InetAddress.getLocalHost().getHostName())); - expectedProcess.addToTags( - new Tag("resource-attr-key", TagType.STRING).setVStr("resource-attr-value")); - expectedProcess.addToTags(new Tag("service.name", TagType.STRING).setVStr("myServiceName")); - - Span expectedSpan = - new Span() - .setTraceIdHigh(TRACE_ID_HIGH) - .setTraceIdLow(TRACE_ID_LOW) - .setSpanId(SPAN_ID_LONG) - .setOperationName("GET /api/endpoint") - .setReferences( - Collections.singletonList( - new SpanRef() - .setSpanId(SPAN_ID_2_LONG) - .setTraceIdHigh(TRACE_ID_HIGH) - .setTraceIdLow(TRACE_ID_LOW) - .setRefType(SpanRefType.CHILD_OF))) - .setParentSpanId(SPAN_ID_2_LONG) - .setStartTime(TimeUnit.NANOSECONDS.toMicros(span.getStartEpochNanos())) - .setDuration(DURATION.toMillis() * 1000) - .setLogs(Collections.emptyList()); - expectedSpan.addToTags(new Tag("span.kind", TagType.STRING).setVStr("consumer")); - expectedSpan.addToTags(new 
Tag("otel.status_code", TagType.STRING).setVStr("OK")); - expectedSpan.addToTags( - new Tag("otel.scope.name", TagType.STRING).setVStr("io.opentelemetry.auto")); - expectedSpan.addToTags( - new Tag("otel.library.name", TagType.STRING).setVStr("io.opentelemetry.auto")); - expectedSpan.addToTags(new Tag("otel.scope.version", TagType.STRING).setVStr("1.0.0")); - expectedSpan.addToTags(new Tag("otel.library.version", TagType.STRING).setVStr("1.0.0")); - - List expectedSpans = Collections.singletonList(expectedSpan); - verify(thriftSender).send(expectedProcess, expectedSpans); - } - - @Test - void testExportMultipleResources() throws SenderException, UnknownHostException { - SpanData span = - testSpanData( - Resource.create( - Attributes.of( - stringKey("service.name"), - "myServiceName1", - stringKey("resource-attr-key-1"), - "resource-attr-value-1")), - "GET /api/endpoint/1", - SPAN_CONTEXT, - SpanContext.getInvalid()); - - SpanData span2 = - testSpanData( - Resource.create( - Attributes.of( - stringKey("service.name"), - "myServiceName2", - stringKey("resource-attr-key-2"), - "resource-attr-value-2")), - "GET /api/endpoint/2", - SPAN_CONTEXT_2, - SpanContext.getInvalid()); - - // test - CompletableResultCode result = exporter.export(Arrays.asList(span, span2)); - result.join(1, TimeUnit.SECONDS); - assertThat(result.isSuccess()).isEqualTo(true); - - // verify - Process expectedProcess1 = new Process("myServiceName1"); - expectedProcess1.addToTags( - new Tag("jaeger.version", TagType.STRING).setVStr("opentelemetry-java")); - expectedProcess1.addToTags( - new Tag("ip", TagType.STRING).setVStr(InetAddress.getLocalHost().getHostAddress())); - expectedProcess1.addToTags( - new Tag("hostname", TagType.STRING).setVStr(InetAddress.getLocalHost().getHostName())); - expectedProcess1.addToTags( - new Tag("resource-attr-key-1", TagType.STRING).setVStr("resource-attr-value-1")); - expectedProcess1.addToTags(new Tag("service.name", TagType.STRING).setVStr("myServiceName1")); - - Process expectedProcess2 = new Process("myServiceName2"); - expectedProcess2.addToTags( - new Tag("jaeger.version", TagType.STRING).setVStr("opentelemetry-java")); - expectedProcess2.addToTags( - new Tag("ip", TagType.STRING).setVStr(InetAddress.getLocalHost().getHostAddress())); - expectedProcess2.addToTags( - new Tag("hostname", TagType.STRING).setVStr(InetAddress.getLocalHost().getHostName())); - expectedProcess2.addToTags( - new Tag("resource-attr-key-2", TagType.STRING).setVStr("resource-attr-value-2")); - expectedProcess2.addToTags(new Tag("service.name", TagType.STRING).setVStr("myServiceName2")); - - Span expectedSpan1 = - new Span() - .setTraceIdHigh(TRACE_ID_HIGH) - .setTraceIdLow(TRACE_ID_LOW) - .setSpanId(SPAN_ID_LONG) - .setOperationName("GET /api/endpoint/1") - .setReferences(Collections.emptyList()) - .setStartTime(TimeUnit.NANOSECONDS.toMicros(span.getStartEpochNanos())) - .setDuration(DURATION.toMillis() * 1000) - .setLogs(Collections.emptyList()); - expectedSpan1.addToTags(new Tag("span.kind", TagType.STRING).setVStr("consumer")); - expectedSpan1.addToTags(new Tag("otel.status_code", TagType.STRING).setVStr("OK")); - expectedSpan1.addToTags( - new Tag("otel.scope.name", TagType.STRING).setVStr("io.opentelemetry.auto")); - expectedSpan1.addToTags( - new Tag("otel.library.name", TagType.STRING).setVStr("io.opentelemetry.auto")); - expectedSpan1.addToTags(new Tag("otel.scope.version", TagType.STRING).setVStr("1.0.0")); - expectedSpan1.addToTags(new Tag("otel.library.version", TagType.STRING).setVStr("1.0.0")); - - 
Span expectedSpan2 = - new Span() - .setTraceIdHigh(TRACE_ID_HIGH) - .setTraceIdLow(TRACE_ID_LOW) - .setSpanId(SPAN_ID_2_LONG) - .setOperationName("GET /api/endpoint/2") - .setReferences(Collections.emptyList()) - .setStartTime(TimeUnit.NANOSECONDS.toMicros(span2.getStartEpochNanos())) - .setDuration(DURATION.toMillis() * 1000) - .setLogs(Collections.emptyList()); - expectedSpan2.addToTags(new Tag("span.kind", TagType.STRING).setVStr("consumer")); - expectedSpan2.addToTags(new Tag("otel.status_code", TagType.STRING).setVStr("OK")); - expectedSpan2.addToTags( - new Tag("otel.scope.name", TagType.STRING).setVStr("io.opentelemetry.auto")); - expectedSpan2.addToTags( - new Tag("otel.library.name", TagType.STRING).setVStr("io.opentelemetry.auto")); - expectedSpan2.addToTags(new Tag("otel.scope.version", TagType.STRING).setVStr("1.0.0")); - expectedSpan2.addToTags(new Tag("otel.library.version", TagType.STRING).setVStr("1.0.0")); - - verify(thriftSender).send(expectedProcess2, Collections.singletonList(expectedSpan2)); - verify(thriftSender).send(expectedProcess1, Collections.singletonList(expectedSpan1)); - } - - @Test - @SuppressLogger(JaegerThriftSpanExporter.class) - void shutdown() { - assertThat(exporter.shutdown().join(1, TimeUnit.SECONDS).isSuccess()).isTrue(); - assertThat(logs.getEvents()).isEmpty(); - assertThat( - exporter - .export( - Collections.singletonList( - testSpanData( - Resource.getDefault(), - "span name", - SPAN_CONTEXT, - SpanContext.getInvalid()))) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); - assertThat(exporter.shutdown().join(1, TimeUnit.SECONDS).isSuccess()).isTrue(); - logs.assertContains("Calling shutdown() multiple times."); - } - - private static SpanData testSpanData( - Resource resource, String spanName, SpanContext spanContext, SpanContext parentContext) { - long startMs = System.currentTimeMillis(); - long endMs = startMs + DURATION.toMillis(); - return TestSpanData.builder() - .setHasEnded(true) - .setSpanContext(spanContext) - .setParentSpanContext(parentContext) - .setName(spanName) - .setStartEpochNanos(TimeUnit.MILLISECONDS.toNanos(startMs)) - .setEndEpochNanos(TimeUnit.MILLISECONDS.toNanos(endMs)) - .setStatus(StatusData.ok()) - .setKind(SpanKind.CONSUMER) - .setLinks(Collections.emptyList()) - .setTotalRecordedLinks(0) - .setTotalRecordedEvents(0) - .setInstrumentationScopeInfo( - InstrumentationScopeInfo.builder("io.opentelemetry.auto").setVersion("1.0.0").build()) - .setResource(resource) - .build(); - } -} diff --git a/exporters/jaeger/build.gradle.kts b/exporters/jaeger/build.gradle.kts deleted file mode 100644 index f59e6d62042..00000000000 --- a/exporters/jaeger/build.gradle.kts +++ /dev/null @@ -1,48 +0,0 @@ -plugins { - id("otel.java-conventions") - id("otel.publish-conventions") - - id("otel.animalsniffer-conventions") - - id("com.squareup.wire") -} - -description = "OpenTelemetry - Jaeger Exporter" -otelJava.moduleName.set("io.opentelemetry.exporter.jaeger") - -dependencies { - api(project(":sdk:all")) - - protoSource(project(":exporters:jaeger-proto")) - - implementation(project(":exporters:common")) - implementation(project(":exporters:sender:okhttp")) - implementation(project(":sdk-extensions:autoconfigure-spi")) - - compileOnly("io.grpc:grpc-stub") - - implementation("com.fasterxml.jackson.jr:jackson-jr-objects") - - testImplementation(project(":exporters:jaeger-proto")) - - testImplementation("com.fasterxml.jackson.jr:jackson-jr-stree") - testImplementation("com.google.protobuf:protobuf-java-util") - 
testImplementation("com.linecorp.armeria:armeria-junit5") - testImplementation("com.linecorp.armeria:armeria-grpc-protocol") - testImplementation("com.squareup.okhttp3:okhttp") - testImplementation("org.testcontainers:junit-jupiter") - - testImplementation(project(":sdk:testing")) -} - -wire { - custom { - schemaHandlerFactoryClass = "io.opentelemetry.gradle.ProtoFieldsWireHandlerFactory" - } -} - -afterEvaluate { - tasks.getByName("generateMainProtos") { - setDependsOn(configurations.getByName("protoPath")) - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/BatchMarshaler.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/BatchMarshaler.java deleted file mode 100644 index 4900b7a47d9..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/BatchMarshaler.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; -import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; -import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.exporter.jaeger.proto.api_v2.internal.Batch; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.data.SpanData; -import java.io.IOException; -import java.util.List; - -final class BatchMarshaler extends MarshalerWithSize { - - private final SpanMarshaler[] spans; - private final ProcessMarshaler process; - - static BatchMarshaler create(List spans, Resource resource) { - SpanMarshaler[] spanMarshalers = SpanMarshaler.createRepeated(spans); - ProcessMarshaler processMarshaler = ProcessMarshaler.create(resource); - return new BatchMarshaler(spanMarshalers, processMarshaler); - } - - BatchMarshaler(SpanMarshaler[] spans, ProcessMarshaler process) { - super(calculateSize(spans, process)); - this.spans = spans; - this.process = process; - } - - @Override - protected void writeTo(Serializer output) throws IOException { - output.serializeRepeatedMessage(Batch.SPANS, spans); - output.serializeMessage(Batch.PROCESS, process); - } - - private static int calculateSize(SpanMarshaler[] spans, ProcessMarshaler process) { - int size = 0; - size += MarshalerUtil.sizeRepeatedMessage(Batch.SPANS, spans); - size += MarshalerUtil.sizeMessage(Batch.PROCESS, process); - return size; - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/JaegerGrpcSpanExporter.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/JaegerGrpcSpanExporter.java deleted file mode 100644 index acaa3ea42ad..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/JaegerGrpcSpanExporter.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.exporter.internal.grpc.GrpcExporter; -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.sdk.trace.export.SpanExporter; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.stream.Collectors; -import javax.annotation.concurrent.ThreadSafe; - -/** - * 
Exports spans to Jaeger via gRPC, using Jaeger's protobuf model. - * - * @deprecated Use {@code OtlpGrpcSpanExporter} or {@code OtlpHttpSpanExporter} from opentelemetry-exporter-otlp - * instead. - */ -@ThreadSafe -@Deprecated -public final class JaegerGrpcSpanExporter implements SpanExporter { - - private static final String DEFAULT_HOST_NAME = "unknown"; - private static final AttributeKey CLIENT_VERSION_KEY = - AttributeKey.stringKey("jaeger.version"); - private static final String CLIENT_VERSION_VALUE = "opentelemetry-java"; - private static final AttributeKey HOSTNAME_KEY = AttributeKey.stringKey("hostname"); - private static final String IP_DEFAULT = "0.0.0.0"; - // Visible for testing - static final AttributeKey IP_KEY = AttributeKey.stringKey("ip"); - - private final GrpcExporter delegate; - - // Jaeger-specific resource information - private final Resource jaegerResource; - - JaegerGrpcSpanExporter(GrpcExporter delegate) { - this.delegate = delegate; - - String hostname; - String ipv4; - - try { - hostname = InetAddress.getLocalHost().getHostName(); - ipv4 = InetAddress.getLocalHost().getHostAddress(); - } catch (UnknownHostException e) { - hostname = DEFAULT_HOST_NAME; - ipv4 = IP_DEFAULT; - } - - jaegerResource = - Resource.builder() - .put(CLIENT_VERSION_KEY, CLIENT_VERSION_VALUE) - .put(IP_KEY, ipv4) - .put(HOSTNAME_KEY, hostname) - .build(); - } - - /** - * Submits all the given spans in a single batch to the Jaeger collector. - * - * @param spans the list of sampled Spans to be exported. - * @return the result of the operation - */ - @Override - public CompletableResultCode export(Collection spans) { - List results = new ArrayList<>(); - spans.stream() - .collect(Collectors.groupingBy(SpanData::getResource)) - .forEach( - (resource, spanData) -> - results.add(delegate.export(buildRequest(resource, spanData), spanData.size()))); - - return CompletableResultCode.ofAll(results); - } - - private PostSpansRequestMarshaler buildRequest(Resource resource, List spans) { - Resource mergedResource = jaegerResource.merge(resource); - return PostSpansRequestMarshaler.create(spans, mergedResource); - } - - /** - * The Jaeger exporter does not batch spans, so this method will immediately return with success. - * - * @return always Success - */ - @Override - public CompletableResultCode flush() { - return CompletableResultCode.ofSuccess(); - } - - /** - * Returns a new builder instance for this exporter. - * - * @return a new builder instance for this exporter. - */ - public static JaegerGrpcSpanExporterBuilder builder() { - return new JaegerGrpcSpanExporterBuilder(); - } - - /** - * Initiates an orderly shutdown in which preexisting calls continue but new calls are immediately - * cancelled. 
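Earlier in this class, export() groups spans by their SDK Resource and merges each group's resource into the Jaeger-specific one built in the constructor (jaeger.version, ip, hostname). A small sketch of that merge and its precedence; the hostname value is an assumption:

```java
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.resources.Resource;

class ResourceMergeSketch {
  public static void main(String[] args) {
    // Stand-in for the jaegerResource built in the constructor above.
    Resource jaegerResource =
        Resource.builder()
            .put("jaeger.version", "opentelemetry-java")
            .put("hostname", "example-host") // assumed; the real code looks up InetAddress
            .build();
    Resource spanResource =
        Resource.create(Attributes.of(AttributeKey.stringKey("service.name"), "my-service"));
    // jaegerResource.merge(resource): attributes from the span's resource win on key collisions.
    Resource merged = jaegerResource.merge(spanResource);
    System.out.println(merged.getAttributes());
  }
}
```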
- */ - @Override - public CompletableResultCode shutdown() { - return delegate.shutdown(); - } - - // Visible for testing - Resource getJaegerResource() { - return jaegerResource; - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/JaegerGrpcSpanExporterBuilder.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/JaegerGrpcSpanExporterBuilder.java deleted file mode 100644 index e67311dc7e5..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/JaegerGrpcSpanExporterBuilder.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import static io.opentelemetry.api.internal.Utils.checkArgument; -import static java.util.Objects.requireNonNull; - -import io.grpc.ManagedChannel; -import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.metrics.MeterProvider; -import io.opentelemetry.exporter.internal.grpc.GrpcExporterBuilder; -import java.net.URI; -import java.time.Duration; -import java.util.concurrent.TimeUnit; -import javax.net.ssl.SSLContext; -import javax.net.ssl.X509TrustManager; - -/** - * Builder utility for this exporter. - * - * @deprecated Use {@code OtlpGrpcSpanExporter} or {@code OtlpHttpSpanExporter} from opentelemetry-exporter-otlp - * instead. - */ -@Deprecated -public final class JaegerGrpcSpanExporterBuilder { - - private static final String GRPC_SERVICE_NAME = "jaeger.api_v2.CollectorService"; - - // Visible for testing - static final String GRPC_ENDPOINT_PATH = "/" + GRPC_SERVICE_NAME + "/PostSpans"; - - private static final String DEFAULT_ENDPOINT_URL = "http://localhost:14250"; - private static final URI DEFAULT_ENDPOINT = URI.create(DEFAULT_ENDPOINT_URL); - private static final long DEFAULT_TIMEOUT_SECS = 10; - - private final GrpcExporterBuilder delegate; - - JaegerGrpcSpanExporterBuilder() { - delegate = - new GrpcExporterBuilder<>( - "jaeger", - "span", - DEFAULT_TIMEOUT_SECS, - DEFAULT_ENDPOINT, - () -> MarshalerCollectorServiceGrpc::newFutureStub, - GRPC_ENDPOINT_PATH); - } - - /** - * Sets the managed channel to use when communicating with the backend. Takes precedence over - * {@link #setEndpoint(String)} if both are called. - * - * @param channel the channel to use. - * @return this. - * @deprecated Use {@link #setEndpoint(String)}. If you have a use case not satisfied by the - * methods on this builder, please file an issue to let us know what it is. - */ - @Deprecated - public JaegerGrpcSpanExporterBuilder setChannel(ManagedChannel channel) { - delegate.setChannel(channel); - return this; - } - - /** - * Sets the Jaeger endpoint to connect to. If unset, defaults to {@value DEFAULT_ENDPOINT_URL}. - * The endpoint must start with either http:// or https://. - */ - public JaegerGrpcSpanExporterBuilder setEndpoint(String endpoint) { - requireNonNull(endpoint, "endpoint"); - delegate.setEndpoint(endpoint); - return this; - } - - /** - * Sets the method used to compress payloads. If unset, compression is disabled. Currently - * supported compression methods include "gzip" and "none". - * - * @since 1.20.0 - */ - public JaegerGrpcSpanExporterBuilder setCompression(String compressionMethod) { - requireNonNull(compressionMethod, "compressionMethod"); - checkArgument( - compressionMethod.equals("gzip") || compressionMethod.equals("none"), - "Unsupported compression method. 
Supported compression methods include: gzip, none."); - delegate.setCompression(compressionMethod); - return this; - } - - /** - * Sets the maximum time to wait for the collector to process an exported batch of spans. If - * unset, defaults to {@value DEFAULT_TIMEOUT_SECS}s. - */ - public JaegerGrpcSpanExporterBuilder setTimeout(long timeout, TimeUnit unit) { - requireNonNull(unit, "unit"); - checkArgument(timeout >= 0, "timeout must be non-negative"); - delegate.setTimeout(timeout, unit); - return this; - } - - /** - * Sets the maximum time to wait for the collector to process an exported batch of spans. If - * unset, defaults to {@value DEFAULT_TIMEOUT_SECS}s. - */ - public JaegerGrpcSpanExporterBuilder setTimeout(Duration timeout) { - requireNonNull(timeout, "timeout"); - delegate.setTimeout(timeout); - return this; - } - - /** - * Sets the certificate chain to use for verifying servers when TLS is enabled. The {@code byte[]} - * should contain an X.509 certificate collection in PEM format. If not set, TLS connections will - * use the system default trusted certificates. - */ - public JaegerGrpcSpanExporterBuilder setTrustedCertificates(byte[] trustedCertificatesPem) { - delegate.setTrustManagerFromCerts(trustedCertificatesPem); - return this; - } - - /** Sets the client key and chain to use for verifying servers when mTLS is enabled. */ - public JaegerGrpcSpanExporterBuilder setClientTls(byte[] privateKeyPem, byte[] certificatePem) { - delegate.setKeyManagerFromCerts(privateKeyPem, certificatePem); - return this; - } - - /** - * Sets the "bring-your-own" SSLContext for use with TLS. Users should call this _or_ set raw - * certificate bytes, but not both. - */ - public JaegerGrpcSpanExporterBuilder setSslContext( - SSLContext sslContext, X509TrustManager trustManager) { - delegate.setSslContext(sslContext, trustManager); - return this; - } - - /** - * Sets the {@link MeterProvider} to use to collect metrics related to export. If not set, uses - * {@link GlobalOpenTelemetry#getMeterProvider()}. - * - * @since 1.15.0 - */ - public JaegerGrpcSpanExporterBuilder setMeterProvider(MeterProvider meterProvider) { - requireNonNull(meterProvider, "meterProvider"); - delegate.setMeterProvider(meterProvider); - return this; - } - - /** - * Constructs a new instance of the exporter based on the builder's values. - * - * @return a new exporter's instance. 
- */ - public JaegerGrpcSpanExporter build() { - return new JaegerGrpcSpanExporter(delegate.build()); - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/KeyValueMarshaler.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/KeyValueMarshaler.java deleted file mode 100644 index 09f8b2f5734..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/KeyValueMarshaler.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import com.fasterxml.jackson.jr.ob.JSON; -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; -import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; -import io.opentelemetry.exporter.internal.marshal.ProtoEnumInfo; -import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.exporter.jaeger.proto.api_v2.internal.KeyValue; -import io.opentelemetry.exporter.jaeger.proto.api_v2.internal.ValueType; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -@SuppressWarnings({ - "checkstyle:LocalVariableName", - "checkstyle:MemberName", - "checkstyle:ParameterName", -}) -final class KeyValueMarshaler extends MarshalerWithSize { - - private static final byte[] EMPTY_BYTES = new byte[0]; - - private final byte[] keyUtf8; - private final ProtoEnumInfo valueType; - private final byte[] vStrUtf8; - private final boolean vBool; - private final long vInt64; - private final double vFloat64; - - static List createRepeated(Attributes attributes) { - if (attributes.isEmpty()) { - return new ArrayList<>(); - } - - List marshalers = new ArrayList<>(attributes.size()); - attributes.forEach((attributeKey, o) -> marshalers.add(create(attributeKey, o))); - return marshalers; - } - - static KeyValueMarshaler create(AttributeKey key, Object value) { - byte[] keyUtf8 = MarshalerUtil.toBytes(key.getKey()); - - // Default is the 0 value, string in this case - ProtoEnumInfo valueType = ValueType.STRING; - byte[] vStrUtf8 = EMPTY_BYTES; - boolean vBool = false; - long vInt64 = 0; - double vFloat64 = 0; - - switch (key.getType()) { - case STRING: - valueType = ValueType.STRING; - vStrUtf8 = MarshalerUtil.toBytes(((String) value)); - break; - case BOOLEAN: - valueType = ValueType.BOOL; - vBool = (boolean) value; - break; - case LONG: - valueType = ValueType.INT64; - vInt64 = (long) value; - break; - case DOUBLE: - valueType = ValueType.FLOAT64; - vFloat64 = (double) value; - break; - case STRING_ARRAY: - case BOOLEAN_ARRAY: - case LONG_ARRAY: - case DOUBLE_ARRAY: - valueType = ValueType.STRING; - try { - vStrUtf8 = JSON.std.asBytes(value); - } catch (IOException e) { - // Can't happen, just ignore it. 
- } - break; - } - - return new KeyValueMarshaler(keyUtf8, valueType, vStrUtf8, vBool, vInt64, vFloat64); - } - - KeyValueMarshaler( - byte[] keyUtf8, - ProtoEnumInfo valueType, - byte[] vStrUtf8, - boolean vBool, - long vInt64, - double vFloat64) { - super(calculateSize(keyUtf8, valueType, vStrUtf8, vBool, vInt64, vFloat64)); - this.keyUtf8 = keyUtf8; - this.valueType = valueType; - this.vStrUtf8 = vStrUtf8; - this.vBool = vBool; - this.vInt64 = vInt64; - this.vFloat64 = vFloat64; - } - - @Override - protected void writeTo(Serializer output) throws IOException { - output.serializeString(KeyValue.KEY, keyUtf8); - output.serializeEnum(KeyValue.V_TYPE, valueType); - output.serializeString(KeyValue.V_STR, vStrUtf8); - output.serializeBool(KeyValue.V_BOOL, vBool); - output.serializeInt64(KeyValue.V_INT64, vInt64); - output.serializeDouble(KeyValue.V_FLOAT64, vFloat64); - } - - private static int calculateSize( - byte[] keyUtf8, - ProtoEnumInfo valueType, - byte[] vStrUtf8, - boolean vBool, - long vInt64, - double vFloat64) { - int size = 0; - size += MarshalerUtil.sizeBytes(KeyValue.KEY, keyUtf8); - size += MarshalerUtil.sizeEnum(KeyValue.V_TYPE, valueType); - size += MarshalerUtil.sizeBytes(KeyValue.V_STR, vStrUtf8); - size += MarshalerUtil.sizeBool(KeyValue.V_BOOL, vBool); - size += MarshalerUtil.sizeInt64(KeyValue.V_INT64, vInt64); - size += MarshalerUtil.sizeDouble(KeyValue.V_FLOAT64, vFloat64); - return size; - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/LogMarshaler.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/LogMarshaler.java deleted file mode 100644 index be871489f0b..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/LogMarshaler.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; -import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; -import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.exporter.jaeger.proto.api_v2.internal.Log; -import io.opentelemetry.sdk.trace.data.EventData; -import java.io.IOException; -import java.util.List; - -final class LogMarshaler extends MarshalerWithSize { - - private static final AttributeKey KEY_LOG_EVENT = AttributeKey.stringKey("event"); - private static final AttributeKey KEY_EVENT_DROPPED_ATTRIBUTES_COUNT = - AttributeKey.longKey("otel.event.dropped_attributes_count"); - - private final TimeMarshaler timestamp; - private final List fields; - - static LogMarshaler[] createRepeated(List events) { - int len = events.size(); - LogMarshaler[] marshalers = new LogMarshaler[len]; - for (int i = 0; i < len; i++) { - marshalers[i] = create(events.get(i)); - } - return marshalers; - } - - static LogMarshaler create(EventData event) { - TimeMarshaler timestamp = TimeMarshaler.create(event.getEpochNanos()); - - List fields = KeyValueMarshaler.createRepeated(event.getAttributes()); - - // name is a top-level property in OpenTelemetry - fields.add(KeyValueMarshaler.create(KEY_LOG_EVENT, event.getName())); - - int droppedAttributesCount = event.getDroppedAttributesCount(); - if (droppedAttributesCount > 0) { - fields.add( - KeyValueMarshaler.create( - KEY_EVENT_DROPPED_ATTRIBUTES_COUNT, (long) droppedAttributesCount)); - } - - return new LogMarshaler(timestamp, fields); - } - - 
LogMarshaler(TimeMarshaler timestamp, List fields) { - super(calculateSize(timestamp, fields)); - this.timestamp = timestamp; - this.fields = fields; - } - - @Override - protected void writeTo(Serializer output) throws IOException { - output.serializeMessage(Log.TIMESTAMP, timestamp); - output.serializeRepeatedMessage(Log.FIELDS, fields); - } - - private static int calculateSize(TimeMarshaler timestamp, List fields) { - int size = 0; - size += MarshalerUtil.sizeMessage(Log.TIMESTAMP, timestamp); - size += MarshalerUtil.sizeRepeatedMessage(Log.FIELDS, fields); - return size; - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/MarshalerCollectorServiceGrpc.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/MarshalerCollectorServiceGrpc.java deleted file mode 100644 index 7eb71fb71d1..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/MarshalerCollectorServiceGrpc.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import static io.grpc.MethodDescriptor.generateFullMethodName; - -import com.google.common.util.concurrent.ListenableFuture; -import io.grpc.CallOptions; -import io.grpc.Channel; -import io.grpc.MethodDescriptor; -import io.grpc.stub.ClientCalls; -import io.opentelemetry.exporter.internal.grpc.MarshalerInputStream; -import io.opentelemetry.exporter.internal.grpc.MarshalerServiceStub; -import java.io.InputStream; -import javax.annotation.Nullable; - -// Adapted from the protoc generated code for CollectorServiceGrpc. -final class MarshalerCollectorServiceGrpc { - - private static final String SERVICE_NAME = "jaeger.api_v2.CollectorService"; - - private static final MethodDescriptor.Marshaller REQUEST_MARSHALLER = - new MethodDescriptor.Marshaller() { - @Override - public InputStream stream(PostSpansRequestMarshaler value) { - return new MarshalerInputStream(value); - } - - @Override - public PostSpansRequestMarshaler parse(InputStream stream) { - throw new UnsupportedOperationException("Only for serializing"); - } - }; - - private static final MethodDescriptor.Marshaller RESPONSE_MARSHALER = - new MethodDescriptor.Marshaller() { - @Override - public InputStream stream(PostSpansResponse value) { - throw new UnsupportedOperationException("Only for parsing"); - } - - @Override - public PostSpansResponse parse(InputStream stream) { - return PostSpansResponse.INSTANCE; - } - }; - - private static final MethodDescriptor - getPostSpansMethod = - MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName(generateFullMethodName(SERVICE_NAME, "PostSpans")) - .setRequestMarshaller(REQUEST_MARSHALLER) - .setResponseMarshaller(RESPONSE_MARSHALER) - .build(); - - static CollectorServiceFutureStub newFutureStub( - Channel channel, @Nullable String authorityOverride) { - return CollectorServiceFutureStub.newStub( - (c, options) -> new CollectorServiceFutureStub(c, options.withAuthority(authorityOverride)), - channel); - } - - static final class CollectorServiceFutureStub - extends MarshalerServiceStub< - PostSpansRequestMarshaler, PostSpansResponse, CollectorServiceFutureStub> { - private CollectorServiceFutureStub(Channel channel, CallOptions callOptions) { - super(channel, callOptions); - } - - @Override - protected MarshalerCollectorServiceGrpc.CollectorServiceFutureStub build( - Channel channel, CallOptions callOptions) { - return new 
MarshalerCollectorServiceGrpc.CollectorServiceFutureStub(channel, callOptions); - } - - @Override - public ListenableFuture export(PostSpansRequestMarshaler request) { - return ClientCalls.futureUnaryCall( - getChannel().newCall(getPostSpansMethod, getCallOptions()), request); - } - } - - private MarshalerCollectorServiceGrpc() {} -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/PostSpansRequestMarshaler.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/PostSpansRequestMarshaler.java deleted file mode 100644 index 69347f82c80..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/PostSpansRequestMarshaler.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; -import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; -import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.exporter.jaeger.proto.api_v2.internal.PostSpansRequest; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.data.SpanData; -import java.io.IOException; -import java.util.List; - -final class PostSpansRequestMarshaler extends MarshalerWithSize { - - private final BatchMarshaler batch; - - static PostSpansRequestMarshaler create(List spans, Resource resource) { - return new PostSpansRequestMarshaler(BatchMarshaler.create(spans, resource)); - } - - PostSpansRequestMarshaler(BatchMarshaler batch) { - super(calculateSize(batch)); - this.batch = batch; - } - - @Override - protected void writeTo(Serializer output) throws IOException { - output.serializeMessage(PostSpansRequest.BATCH, batch); - } - - private static int calculateSize(BatchMarshaler batch) { - int size = 0; - size += MarshalerUtil.sizeMessage(PostSpansRequest.BATCH, batch); - return size; - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/PostSpansResponse.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/PostSpansResponse.java deleted file mode 100644 index 3daa41ce560..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/PostSpansResponse.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -// A Java object to correspond to the gRPC response for the Collector.PostSpans method. If fields -// are added to the type in the future, this can be converted to an actual class. -// -// It may seem like Void could be used instead but gRPC does not allow response values to be -// null. 
-enum PostSpansResponse { - INSTANCE; -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/ProcessMarshaler.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/ProcessMarshaler.java deleted file mode 100644 index e50027a894c..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/ProcessMarshaler.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; -import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; -import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.exporter.jaeger.proto.api_v2.internal.Process; -import io.opentelemetry.sdk.resources.Resource; -import java.io.IOException; -import java.util.List; - -final class ProcessMarshaler extends MarshalerWithSize { - - private static final AttributeKey SERVICE_NAME = AttributeKey.stringKey("service.name"); - - private final byte[] serviceNameUtf8; - private final List tags; - - static ProcessMarshaler create(Resource resource) { - String serviceName = resource.getAttribute(SERVICE_NAME); - if (serviceName == null || serviceName.isEmpty()) { - serviceName = Resource.getDefault().getAttribute(SERVICE_NAME); - } - - return new ProcessMarshaler( - MarshalerUtil.toBytes(serviceName), - KeyValueMarshaler.createRepeated(resource.getAttributes())); - } - - ProcessMarshaler(byte[] serviceNameUtf8, List tags) { - super(calculateSize(serviceNameUtf8, tags)); - this.serviceNameUtf8 = serviceNameUtf8; - this.tags = tags; - } - - @Override - protected void writeTo(Serializer output) throws IOException { - output.serializeString(Process.SERVICE_NAME, serviceNameUtf8); - output.serializeRepeatedMessage(Process.TAGS, tags); - } - - private static int calculateSize(byte[] serviceNameUtf8, List tags) { - int size = 0; - size += MarshalerUtil.sizeBytes(Process.SERVICE_NAME, serviceNameUtf8); - size += MarshalerUtil.sizeRepeatedMessage(Process.TAGS, tags); - return size; - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/SpanMarshaler.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/SpanMarshaler.java deleted file mode 100644 index 40f2457eecf..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/SpanMarshaler.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import static io.opentelemetry.api.common.AttributeKey.booleanKey; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.SpanKind; -import io.opentelemetry.api.trace.StatusCode; -import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; -import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; -import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.exporter.jaeger.proto.api_v2.internal.Span; -import io.opentelemetry.sdk.trace.data.SpanData; -import java.io.IOException; -import java.util.List; -import java.util.Locale; - -final class SpanMarshaler extends MarshalerWithSize { - - private static final AttributeKey KEY_ERROR = booleanKey("error"); - private static final AttributeKey KEY_DROPPED_ATTRIBUTES_COUNT = - 
AttributeKey.longKey("otel.dropped_attributes_count"); - private static final AttributeKey KEY_DROPPED_EVENTS_COUNT = - AttributeKey.longKey("otel.dropped_events_count"); - private static final AttributeKey KEY_SPAN_KIND = AttributeKey.stringKey("span.kind"); - private static final AttributeKey KEY_SPAN_STATUS_MESSAGE = - AttributeKey.stringKey("otel.status_description"); - private static final AttributeKey KEY_SPAN_STATUS_CODE = - AttributeKey.stringKey("otel.status_code"); - private static final AttributeKey KEY_INSTRUMENTATION_SCOPE_NAME = - AttributeKey.stringKey("otel.scope.name"); - private static final AttributeKey KEY_INSTRUMENTATION_SCOPE_VERSION = - AttributeKey.stringKey("otel.scope.version"); - private static final AttributeKey KEY_INSTRUMENTATION_LIBRARY_NAME = - AttributeKey.stringKey("otel.library.name"); - private static final AttributeKey KEY_INSTRUMENTATION_LIBRARY_VERSION = - AttributeKey.stringKey("otel.library.version"); - - private final String traceId; - private final String spanId; - private final byte[] operationNameUtf8; - private final TimeMarshaler startTime; - private final TimeMarshaler duration; - private final List tags; - private final LogMarshaler[] logs; - private final List references; - - static SpanMarshaler[] createRepeated(List spans) { - int len = spans.size(); - SpanMarshaler[] marshalers = new SpanMarshaler[len]; - for (int i = 0; i < len; i++) { - marshalers[i] = SpanMarshaler.create(spans.get(i)); - } - return marshalers; - } - - static SpanMarshaler create(SpanData span) { - String traceId = span.getSpanContext().getTraceId(); - String spanId = span.getSpanContext().getSpanId(); - byte[] operationNameUtf8 = MarshalerUtil.toBytes(span.getName()); - TimeMarshaler startTime = TimeMarshaler.create(span.getStartEpochNanos()); - TimeMarshaler duration = - TimeMarshaler.create(span.getEndEpochNanos() - span.getStartEpochNanos()); - - List tags = KeyValueMarshaler.createRepeated(span.getAttributes()); - int droppedAttributes = span.getTotalAttributeCount() - span.getAttributes().size(); - if (droppedAttributes > 0) { - tags.add(KeyValueMarshaler.create(KEY_DROPPED_ATTRIBUTES_COUNT, (long) droppedAttributes)); - } - - LogMarshaler[] logs = LogMarshaler.createRepeated(span.getEvents()); - int droppedEvents = span.getTotalRecordedEvents() - span.getEvents().size(); - if (droppedEvents > 0) { - tags.add(KeyValueMarshaler.create(KEY_DROPPED_EVENTS_COUNT, (long) droppedEvents)); - } - - List references = SpanRefMarshaler.createRepeated(span.getLinks()); - - // add the parent span - SpanContext parentSpanContext = span.getParentSpanContext(); - if (parentSpanContext.isValid()) { - references.add(SpanRefMarshaler.create(parentSpanContext)); - } - - if (span.getKind() != SpanKind.INTERNAL) { - tags.add( - KeyValueMarshaler.create(KEY_SPAN_KIND, span.getKind().name().toLowerCase(Locale.ROOT))); - } - - if (!span.getStatus().getDescription().isEmpty()) { - tags.add( - KeyValueMarshaler.create(KEY_SPAN_STATUS_MESSAGE, span.getStatus().getDescription())); - } - - if (span.getStatus().getStatusCode() != StatusCode.UNSET) { - tags.add( - KeyValueMarshaler.create(KEY_SPAN_STATUS_CODE, span.getStatus().getStatusCode().name())); - } - - tags.add( - KeyValueMarshaler.create( - KEY_INSTRUMENTATION_SCOPE_NAME, span.getInstrumentationScopeInfo().getName())); - // Include instrumentation library name for backwards compatibility - tags.add( - KeyValueMarshaler.create( - KEY_INSTRUMENTATION_LIBRARY_NAME, span.getInstrumentationScopeInfo().getName())); - - if 
(span.getInstrumentationScopeInfo().getVersion() != null) { - tags.add( - KeyValueMarshaler.create( - KEY_INSTRUMENTATION_SCOPE_VERSION, span.getInstrumentationScopeInfo().getVersion())); - // Include instrumentation library name for backwards compatibility - tags.add( - KeyValueMarshaler.create( - KEY_INSTRUMENTATION_LIBRARY_VERSION, - span.getInstrumentationScopeInfo().getVersion())); - } - - if (span.getStatus().getStatusCode() == StatusCode.ERROR) { - tags.add(KeyValueMarshaler.create(KEY_ERROR, true)); - } - - return new SpanMarshaler( - traceId, spanId, operationNameUtf8, startTime, duration, tags, logs, references); - } - - SpanMarshaler( - String traceId, - String spanId, - byte[] operationNameUtf8, - TimeMarshaler startTime, - TimeMarshaler duration, - List tags, - LogMarshaler[] logs, - List references) { - super( - calculateSize( - traceId, spanId, operationNameUtf8, startTime, duration, tags, logs, references)); - this.traceId = traceId; - this.spanId = spanId; - this.operationNameUtf8 = operationNameUtf8; - this.startTime = startTime; - this.duration = duration; - this.tags = tags; - this.logs = logs; - this.references = references; - } - - @Override - protected void writeTo(Serializer output) throws IOException { - output.serializeTraceId(Span.TRACE_ID, traceId); - output.serializeSpanId(Span.SPAN_ID, spanId); - output.serializeString(Span.OPERATION_NAME, operationNameUtf8); - output.serializeMessage(Span.START_TIME, startTime); - output.serializeMessage(Span.DURATION, duration); - output.serializeRepeatedMessage(Span.TAGS, tags); - output.serializeRepeatedMessage(Span.LOGS, logs); - output.serializeRepeatedMessage(Span.REFERENCES, references); - } - - private static int calculateSize( - String traceId, - String spanId, - byte[] operationNameUtf8, - TimeMarshaler startTime, - TimeMarshaler duration, - List tags, - LogMarshaler[] logs, - List references) { - int size = 0; - size += MarshalerUtil.sizeTraceId(Span.TRACE_ID, traceId); - size += MarshalerUtil.sizeSpanId(Span.SPAN_ID, spanId); - size += MarshalerUtil.sizeBytes(Span.OPERATION_NAME, operationNameUtf8); - size += MarshalerUtil.sizeMessage(Span.START_TIME, startTime); - size += MarshalerUtil.sizeMessage(Span.DURATION, duration); - size += MarshalerUtil.sizeRepeatedMessage(Span.TAGS, tags); - size += MarshalerUtil.sizeRepeatedMessage(Span.LOGS, logs); - size += MarshalerUtil.sizeRepeatedMessage(Span.REFERENCES, references); - return size; - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/SpanRefMarshaler.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/SpanRefMarshaler.java deleted file mode 100644 index a6dd61a418a..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/SpanRefMarshaler.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; -import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; -import io.opentelemetry.exporter.internal.marshal.ProtoEnumInfo; -import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.exporter.jaeger.proto.api_v2.internal.SpanRef; -import io.opentelemetry.exporter.jaeger.proto.api_v2.internal.SpanRefType; -import io.opentelemetry.sdk.trace.data.LinkData; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; 
- -final class SpanRefMarshaler extends MarshalerWithSize { - - private final String traceId; - private final String spanId; - private final ProtoEnumInfo refType; - - static List createRepeated(List links) { - List marshalers = new ArrayList<>(links.size()); - for (LinkData link : links) { - // we can assume that all links are *follows from* - // https://github.com/open-telemetry/opentelemetry-java/issues/475 - // https://github.com/open-telemetry/opentelemetry-java/pull/481/files#r312577862 - marshalers.add(create(link)); - ; - } - return marshalers; - } - - static SpanRefMarshaler create(SpanContext spanContext) { - return new SpanRefMarshaler( - spanContext.getTraceId(), spanContext.getSpanId(), SpanRefType.CHILD_OF); - } - - static SpanRefMarshaler create(LinkData link) { - return new SpanRefMarshaler( - link.getSpanContext().getTraceId(), - link.getSpanContext().getSpanId(), - SpanRefType.FOLLOWS_FROM); - } - - SpanRefMarshaler(String traceId, String spanId, ProtoEnumInfo refType) { - super(calculateSize(traceId, spanId, refType)); - this.traceId = traceId; - this.spanId = spanId; - this.refType = refType; - } - - @Override - protected void writeTo(Serializer output) throws IOException { - output.serializeTraceId(SpanRef.TRACE_ID, traceId); - output.serializeSpanId(SpanRef.SPAN_ID, spanId); - output.serializeEnum(SpanRef.REF_TYPE, refType); - } - - private static int calculateSize(String traceId, String spanId, ProtoEnumInfo refType) { - int size = 0; - size += MarshalerUtil.sizeTraceId(SpanRef.TRACE_ID, traceId); - size += MarshalerUtil.sizeSpanId(SpanRef.SPAN_ID, spanId); - size += MarshalerUtil.sizeEnum(SpanRef.REF_TYPE, refType); - return size; - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/TimeMarshaler.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/TimeMarshaler.java deleted file mode 100644 index 961e5461133..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/TimeMarshaler.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; -import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; -import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.exporter.jaeger.internal.protobuf.internal.Time; -import java.io.IOException; -import java.util.concurrent.TimeUnit; - -// The wire format for Timestamp and Duration are exactly the same. Just implement one Marshaler -// for them. 
-final class TimeMarshaler extends MarshalerWithSize { - private static final long NANOS_PER_SECOND = TimeUnit.SECONDS.toNanos(1); - - private final long seconds; - private final int nanos; - - static TimeMarshaler create(long timeNanos) { - long seconds = timeNanos / NANOS_PER_SECOND; - int nanos = (int) (timeNanos % NANOS_PER_SECOND); - return new TimeMarshaler(seconds, nanos); - } - - TimeMarshaler(long seconds, int nanos) { - super(calculateSize(seconds, nanos)); - this.seconds = seconds; - this.nanos = nanos; - } - - @Override - protected void writeTo(Serializer output) throws IOException { - output.serializeInt64(Time.SECONDS, seconds); - output.serializeInt32(Time.NANOS, nanos); - } - - private static int calculateSize(long seconds, int nanos) { - int size = 0; - size += MarshalerUtil.sizeInt64(Time.SECONDS, seconds); - size += MarshalerUtil.sizeInt32(Time.NANOS, nanos); - return size; - } -} diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/internal/JaegerGrpcSpanExporterProvider.java b/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/internal/JaegerGrpcSpanExporterProvider.java deleted file mode 100644 index bf3ba40b477..00000000000 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/internal/JaegerGrpcSpanExporterProvider.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger.internal; - -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider; -import io.opentelemetry.sdk.trace.export.SpanExporter; -import java.time.Duration; - -/** - * {@link SpanExporter} SPI implementation for {@link - * io.opentelemetry.exporter.jaeger.JaegerGrpcSpanExporter}. - * - *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. - * - * @deprecated Use {@code OtlpGrpcSpanExporter} or {@code OtlpHttpSpanExporter} from opentelemetry-exporter-otlp - * instead. - */ -@Deprecated -public class JaegerGrpcSpanExporterProvider implements ConfigurableSpanExporterProvider { - @Override - public String getName() { - return "jaeger"; - } - - @Override - public SpanExporter createExporter(ConfigProperties config) { - io.opentelemetry.exporter.jaeger.JaegerGrpcSpanExporterBuilder builder = - io.opentelemetry.exporter.jaeger.JaegerGrpcSpanExporter.builder(); - - String endpoint = config.getString("otel.exporter.jaeger.endpoint"); - if (endpoint != null) { - builder.setEndpoint(endpoint); - } - - Duration timeout = config.getDuration("otel.exporter.jaeger.timeout"); - if (timeout != null) { - builder.setTimeout(timeout); - } - - return builder.build(); - } -} diff --git a/exporters/jaeger/src/main/proto/README.md b/exporters/jaeger/src/main/proto/README.md deleted file mode 100644 index f36259bdd21..00000000000 --- a/exporters/jaeger/src/main/proto/README.md +++ /dev/null @@ -1 +0,0 @@ -Non-empty folder required for wire proto compiler. diff --git a/exporters/jaeger/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider b/exporters/jaeger/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider deleted file mode 100644 index 4c94f1676f8..00000000000 --- a/exporters/jaeger/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider +++ /dev/null @@ -1 +0,0 @@ -io.opentelemetry.exporter.jaeger.internal.JaegerGrpcSpanExporterProvider diff --git a/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/JaegerGrpcSpanExporterTest.java b/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/JaegerGrpcSpanExporterTest.java deleted file mode 100644 index 2889f9ab972..00000000000 --- a/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/JaegerGrpcSpanExporterTest.java +++ /dev/null @@ -1,427 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.assertj.core.api.Assertions.fail; - -import com.google.protobuf.InvalidProtocolBufferException; -import com.linecorp.armeria.server.ServerBuilder; -import com.linecorp.armeria.server.ServiceRequestContext; -import com.linecorp.armeria.server.grpc.protocol.AbstractUnaryGrpcService; -import com.linecorp.armeria.testing.junit5.server.SelfSignedCertificateExtension; -import com.linecorp.armeria.testing.junit5.server.ServerExtension; -import io.github.netmikey.logunit.api.LogCapturer; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.metrics.MeterProvider; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.SpanId; -import io.opentelemetry.api.trace.SpanKind; -import io.opentelemetry.api.trace.TraceFlags; -import io.opentelemetry.api.trace.TraceId; -import io.opentelemetry.api.trace.TraceState; -import io.opentelemetry.exporter.internal.TlsUtil; -import 
io.opentelemetry.exporter.internal.grpc.GrpcExporter; -import io.opentelemetry.exporter.jaeger.proto.api_v2.Collector; -import io.opentelemetry.exporter.jaeger.proto.api_v2.Model; -import io.opentelemetry.internal.testing.slf4j.SuppressLogger; -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.trace.TestSpanData; -import io.opentelemetry.sdk.trace.IdGenerator; -import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.sdk.trace.data.StatusData; -import java.net.InetAddress; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import java.util.concurrent.LinkedBlockingDeque; -import java.util.concurrent.TimeUnit; -import javax.net.ssl.KeyManager; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManager; -import javax.net.ssl.X509KeyManager; -import javax.net.ssl.X509TrustManager; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -@SuppressWarnings("deprecation") // Testing deprecated code -class JaegerGrpcSpanExporterTest { - private static final BlockingQueue postedRequests = - new LinkedBlockingDeque<>(); - - @RegisterExtension - static final ServerExtension server = - new ServerExtension() { - @Override - protected void configure(ServerBuilder sb) { - sb.service( - JaegerGrpcSpanExporterBuilder.GRPC_ENDPOINT_PATH, - new AbstractUnaryGrpcService() { - @Override - protected CompletionStage handleMessage( - ServiceRequestContext ctx, byte[] message) { - try { - postedRequests.add(Collector.PostSpansRequest.parseFrom(message)); - } catch (InvalidProtocolBufferException e) { - CompletableFuture future = new CompletableFuture<>(); - future.completeExceptionally(e); - return future; - } - return CompletableFuture.completedFuture( - Collector.PostSpansResponse.getDefaultInstance().toByteArray()); - } - }); - } - }; - - @RegisterExtension LogCapturer logs = LogCapturer.create().captureForType(GrpcExporter.class); - - @RegisterExtension - static final SelfSignedCertificateExtension serverTls = new SelfSignedCertificateExtension(); - - @RegisterExtension - static final SelfSignedCertificateExtension clientTls = new SelfSignedCertificateExtension(); - - private static JaegerGrpcSpanExporter exporter; - - @BeforeAll - static void setUp() { - exporter = - JaegerGrpcSpanExporter.builder() - .setEndpoint(server.httpUri().toString()) - .setMeterProvider(MeterProvider.noop()) - .build(); - } - - @AfterAll - static void tearDown() { - exporter.shutdown(); - } - - @AfterEach - void reset() { - postedRequests.clear(); - } - - @Test - void testExport() throws Exception { - SpanData span = - testSpanData( - Resource.create( - Attributes.of( - stringKey("service.name"), - "myServiceName", - stringKey("resource-attr-key"), - "resource-attr-value")), - "GET /api/endpoint"); - - // test - CompletableResultCode result = exporter.export(Collections.singletonList(span)); - result.join(10, TimeUnit.SECONDS); - assertThat(result.isSuccess()).isEqualTo(true); - - // verify - assertThat(postedRequests).hasSize(1); - 
Model.Batch batch = postedRequests.poll().getBatch(); - assertThat(batch.getSpans(0).getOperationName()).isEqualTo("GET /api/endpoint"); - assertThat(SpanId.fromBytes(batch.getSpans(0).getSpanId().toByteArray())) - .isEqualTo(span.getSpanContext().getSpanId()); - - assertThat( - getTagValue(batch.getProcess().getTagsList(), "resource-attr-key") - .orElseThrow(() -> new AssertionError("resource-attr-key not found")) - .getVStr()) - .isEqualTo("resource-attr-value"); - - verifyBatch(batch); - assertThat(batch.getProcess().getServiceName()).isEqualTo("myServiceName"); - } - - @Test - void testExportMultipleResources() throws Exception { - SpanData span = - testSpanData( - Resource.create( - Attributes.of( - stringKey("service.name"), - "myServiceName1", - stringKey("resource-attr-key-1"), - "resource-attr-value-1")), - "GET /api/endpoint/1"); - - SpanData span2 = - testSpanData( - Resource.create( - Attributes.of( - stringKey("service.name"), - "myServiceName2", - stringKey("resource-attr-key-2"), - "resource-attr-value-2")), - "GET /api/endpoint/2"); - - // test - CompletableResultCode result = exporter.export(Arrays.asList(span, span2)); - result.join(10, TimeUnit.SECONDS); - assertThat(result.isSuccess()).isEqualTo(true); - - // verify - assertThat(postedRequests).hasSize(2); - List requests = new ArrayList<>(postedRequests); - assertThat(requests).hasSize(2); - for (Collector.PostSpansRequest request : requests) { - Model.Batch batch = request.getBatch(); - - verifyBatch(batch); - - Optional processTag = - getTagValue(batch.getProcess().getTagsList(), "resource-attr-key-1"); - Optional processTag2 = - getTagValue(batch.getProcess().getTagsList(), "resource-attr-key-2"); - if (processTag.isPresent()) { - assertThat(processTag2.isPresent()).isFalse(); - assertThat(batch.getSpans(0).getOperationName()).isEqualTo("GET /api/endpoint/1"); - assertThat(SpanId.fromBytes(batch.getSpans(0).getSpanId().toByteArray())) - .isEqualTo(span.getSpanContext().getSpanId()); - assertThat(processTag.get().getVStr()).isEqualTo("resource-attr-value-1"); - assertThat(batch.getProcess().getServiceName()).isEqualTo("myServiceName1"); - } else if (processTag2.isPresent()) { - assertThat(batch.getSpans(0).getOperationName()).isEqualTo("GET /api/endpoint/2"); - assertThat(SpanId.fromBytes(batch.getSpans(0).getSpanId().toByteArray())) - .isEqualTo(span2.getSpanContext().getSpanId()); - assertThat(processTag2.get().getVStr()).isEqualTo("resource-attr-value-2"); - assertThat(batch.getProcess().getServiceName()).isEqualTo("myServiceName2"); - } else { - fail("No process tag resource-attr-key-1 or resource-attr-key-2"); - } - } - } - - private static void verifyBatch(Model.Batch batch) throws Exception { - assertThat(batch.getSpansCount()).isEqualTo(1); - assertThat(TraceId.fromBytes(batch.getSpans(0).getTraceId().toByteArray())).isNotNull(); - assertThat(batch.getProcess().getTagsCount()).isEqualTo(5); - - assertThat( - getSpanTagValue(batch.getSpans(0), "otel.scope.name") - .orElseThrow(() -> new AssertionError("otel.scope.name not found")) - .getVStr()) - .isEqualTo("io.opentelemetry.auto"); - - assertThat( - getSpanTagValue(batch.getSpans(0), "otel.library.name") - .orElseThrow(() -> new AssertionError("otel.library.name not found")) - .getVStr()) - .isEqualTo("io.opentelemetry.auto"); - - assertThat( - getSpanTagValue(batch.getSpans(0), "otel.library.version") - .orElseThrow(() -> new AssertionError("otel.library.version not found")) - .getVStr()) - .isEqualTo("1.0.0"); - - assertThat( - 
getSpanTagValue(batch.getSpans(0), "otel.scope.version") - .orElseThrow(() -> new AssertionError("otel.scope.version not found")) - .getVStr()) - .isEqualTo("1.0.0"); - - assertThat( - getTagValue(batch.getProcess().getTagsList(), "ip") - .orElseThrow(() -> new AssertionError("ip not found")) - .getVStr()) - .isEqualTo(exporter.getJaegerResource().getAttribute(JaegerGrpcSpanExporter.IP_KEY)); - - assertThat( - getTagValue(batch.getProcess().getTagsList(), "hostname") - .orElseThrow(() -> new AssertionError("hostname not found")) - .getVStr()) - .isEqualTo(InetAddress.getLocalHost().getHostName()); - - assertThat( - getTagValue(batch.getProcess().getTagsList(), "jaeger.version") - .orElseThrow(() -> new AssertionError("jaeger.version not found")) - .getVStr()) - .isEqualTo("opentelemetry-java"); - } - - private static Optional getSpanTagValue(Model.Span span, String tagKey) { - return getTagValue(span.getTagsList(), tagKey); - } - - private static Optional getTagValue(List tags, String tagKey) { - return tags.stream().filter(kv -> kv.getKey().equals(tagKey)).findFirst(); - } - - private static SpanData testSpanData(Resource resource, String spanName) { - long duration = 900; // ms - long startMs = System.currentTimeMillis(); - long endMs = startMs + duration; - return TestSpanData.builder() - .setHasEnded(true) - .setSpanContext( - SpanContext.create( - IdGenerator.random().generateTraceId(), - IdGenerator.random().generateSpanId(), - TraceFlags.getSampled(), - TraceState.getDefault())) - .setName(spanName) - .setStartEpochNanos(TimeUnit.MILLISECONDS.toNanos(startMs)) - .setEndEpochNanos(TimeUnit.MILLISECONDS.toNanos(endMs)) - .setStatus(StatusData.ok()) - .setKind(SpanKind.CONSUMER) - .setLinks(Collections.emptyList()) - .setTotalRecordedLinks(0) - .setTotalRecordedEvents(0) - .setInstrumentationScopeInfo( - InstrumentationScopeInfo.builder("io.opentelemetry.auto").setVersion("1.0.0").build()) - .setResource(resource) - .build(); - } - - @Test - void validTrustedConfig() throws Exception { - assertThatCode( - () -> - JaegerGrpcSpanExporter.builder() - .setTrustedCertificates(serverTls.certificate().getEncoded())) - .doesNotThrowAnyException(); - } - - @Test - void validClientKeyConfig() throws Exception { - assertThatCode( - () -> - JaegerGrpcSpanExporter.builder() - .setClientTls( - clientTls.privateKey().getEncoded(), serverTls.certificate().getEncoded())) - .doesNotThrowAnyException(); - } - - @Test - void validSslContextConfig() throws Exception { - X509TrustManager trustManager = TlsUtil.trustManager(serverTls.certificate().getEncoded()); - - X509KeyManager keyManager = - TlsUtil.keyManager( - clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); - - SSLContext sslContext = SSLContext.getInstance("TLS"); - sslContext.init(new KeyManager[] {keyManager}, new TrustManager[] {trustManager}, null); - - assertThatCode(() -> JaegerGrpcSpanExporter.builder().setSslContext(sslContext, trustManager)) - .doesNotThrowAnyException(); - } - - @Test - @SuppressWarnings("PreferJavaTimeOverload") - void invalidConfig() { - assertThatThrownBy(() -> JaegerGrpcSpanExporter.builder().setTimeout(-1, TimeUnit.MILLISECONDS)) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("timeout must be non-negative"); - assertThatThrownBy(() -> JaegerGrpcSpanExporter.builder().setTimeout(1, null)) - .isInstanceOf(NullPointerException.class) - .hasMessage("unit"); - assertThatThrownBy(() -> JaegerGrpcSpanExporter.builder().setTimeout(null)) - .isInstanceOf(NullPointerException.class) - 
.hasMessage("timeout"); - - assertThatThrownBy(() -> JaegerGrpcSpanExporter.builder().setEndpoint(null)) - .isInstanceOf(NullPointerException.class) - .hasMessage("endpoint"); - assertThatThrownBy(() -> JaegerGrpcSpanExporter.builder().setEndpoint("😺://localhost")) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("Invalid endpoint, must be a URL: 😺://localhost") - .hasCauseInstanceOf(URISyntaxException.class); - assertThatThrownBy(() -> JaegerGrpcSpanExporter.builder().setEndpoint("localhost")) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("Invalid endpoint, must start with http:// or https://: localhost"); - assertThatThrownBy(() -> JaegerGrpcSpanExporter.builder().setEndpoint("gopher://localhost")) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("Invalid endpoint, must start with http:// or https://: gopher://localhost"); - - assertThatThrownBy(() -> JaegerGrpcSpanExporter.builder().setCompression(null)) - .isInstanceOf(NullPointerException.class) - .hasMessage("compressionMethod"); - assertThatThrownBy(() -> JaegerGrpcSpanExporter.builder().setCompression("foo")) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage( - "Unsupported compression method. Supported compression methods include: gzip, none."); - } - - @Test - void compressionDefault() { - JaegerGrpcSpanExporter exporter = JaegerGrpcSpanExporter.builder().build(); - try { - assertThat(exporter).extracting("delegate.grpcSender.compressionEnabled").isEqualTo(false); - } finally { - exporter.shutdown(); - } - } - - @Test - void compressionNone() { - JaegerGrpcSpanExporter exporter = - JaegerGrpcSpanExporter.builder().setCompression("none").build(); - try { - assertThat(exporter).extracting("delegate.grpcSender.compressionEnabled").isEqualTo(false); - } finally { - exporter.shutdown(); - } - } - - @Test - void compressionGzip() { - JaegerGrpcSpanExporter exporter = - JaegerGrpcSpanExporter.builder().setCompression("gzip").build(); - try { - assertThat(exporter).extracting("delegate.grpcSender.compressionEnabled").isEqualTo(true); - } finally { - exporter.shutdown(); - } - } - - @Test - void compressionEnabledAndDisabled() { - JaegerGrpcSpanExporter exporter = - JaegerGrpcSpanExporter.builder().setCompression("gzip").setCompression("none").build(); - try { - assertThat(exporter).extracting("delegate.grpcSender.compressionEnabled").isEqualTo(false); - } finally { - exporter.shutdown(); - } - } - - @Test - @SuppressLogger(GrpcExporter.class) - void shutdown() { - JaegerGrpcSpanExporter exporter = - JaegerGrpcSpanExporter.builder().setEndpoint(server.httpUri().toString()).build(); - assertThat(exporter.shutdown().join(1, TimeUnit.SECONDS).isSuccess()).isTrue(); - assertThat(logs.getEvents()).isEmpty(); - assertThat( - exporter - .export(Collections.singletonList(testSpanData(Resource.getDefault(), "span name"))) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); - assertThat(exporter.shutdown().join(1, TimeUnit.SECONDS).isSuccess()).isTrue(); - logs.assertContains("Calling shutdown() multiple times."); - } -} diff --git a/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/JaegerIntegrationTest.java b/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/JaegerIntegrationTest.java deleted file mode 100644 index b0e32ba636e..00000000000 --- a/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/JaegerIntegrationTest.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - 
-package io.opentelemetry.exporter.jaeger; - -import static io.opentelemetry.api.common.AttributeKey.stringKey; - -import com.fasterxml.jackson.core.TreeNode; -import com.fasterxml.jackson.jr.ob.JSON; -import com.fasterxml.jackson.jr.stree.JacksonJrsTreeCodec; -import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.sdk.OpenTelemetrySdk; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.SdkTracerProvider; -import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; -import io.opentelemetry.sdk.trace.export.SpanExporter; -import java.time.Duration; -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.Response; -import org.awaitility.Awaitility; -import org.junit.jupiter.api.Test; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.wait.strategy.Wait; -import org.testcontainers.images.PullPolicy; -import org.testcontainers.junit.jupiter.Container; -import org.testcontainers.junit.jupiter.Testcontainers; - -@Testcontainers(disabledWithoutDocker = true) -@SuppressWarnings("deprecation") // Testing deprecated code -class JaegerIntegrationTest { - private static final OkHttpClient client = new OkHttpClient(); - - private static final int QUERY_PORT = 16686; - private static final int COLLECTOR_PORT = 14250; - private static final int HEALTH_PORT = 14269; - private static final String SERVICE_NAME = "E2E-test"; - private static final String JAEGER_URL = "http://localhost"; - - @Container - public static GenericContainer jaegerContainer = - new GenericContainer<>("ghcr.io/open-telemetry/opentelemetry-java/jaeger:1.32") - .withImagePullPolicy(PullPolicy.alwaysPull()) - .withExposedPorts(COLLECTOR_PORT, QUERY_PORT, HEALTH_PORT) - .waitingFor(Wait.forHttp("/").forPort(HEALTH_PORT)); - - @Test - void testJaegerIntegration() { - OpenTelemetry openTelemetry = initOpenTelemetry(); - imitateWork(openTelemetry); - Awaitility.await() - .atMost(Duration.ofSeconds(30)) - .until(JaegerIntegrationTest::assertJaegerHaveTrace); - } - - private static OpenTelemetry initOpenTelemetry() { - SpanExporter jaegerExporter = - JaegerGrpcSpanExporter.builder() - .setEndpoint("http://localhost:" + jaegerContainer.getMappedPort(COLLECTOR_PORT)) - .setTimeout(Duration.ofSeconds(30)) - .build(); - return OpenTelemetrySdk.builder() - .setTracerProvider( - SdkTracerProvider.builder() - .addSpanProcessor(SimpleSpanProcessor.create(jaegerExporter)) - .setResource( - Resource.getDefault().toBuilder() - .put(stringKey("service.name"), SERVICE_NAME) - .build()) - .build()) - .build(); - } - - private void imitateWork(OpenTelemetry openTelemetry) { - Span span = - openTelemetry.getTracer(getClass().getCanonicalName()).spanBuilder("Test span").startSpan(); - span.addEvent("some event"); - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - span.end(); - } - - private static boolean assertJaegerHaveTrace() { - try { - String url = - String.format( - "%s/api/traces?service=%s", - String.format(JAEGER_URL + ":%d", jaegerContainer.getMappedPort(QUERY_PORT)), - SERVICE_NAME); - - Request request = - new Request.Builder() - .url(url) - .header("Content-Type", "application/json") - .header("Accept", "application/json") - .build(); - - TreeNode json; - try (Response response = client.newCall(request).execute()) { - json = - JSON.builder() - .treeCodec(new JacksonJrsTreeCodec()) - .build() - .treeFrom(response.body().byteStream()); - } - - return 
json.get("data").get(0).get("traceID") != null; - } catch (Exception e) { - return false; - } - } -} diff --git a/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/PostSpansRequestMarshalerTest.java b/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/PostSpansRequestMarshalerTest.java deleted file mode 100644 index 00e10c2124e..00000000000 --- a/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/PostSpansRequestMarshalerTest.java +++ /dev/null @@ -1,437 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger; - -import static io.opentelemetry.api.common.AttributeKey.booleanArrayKey; -import static io.opentelemetry.api.common.AttributeKey.booleanKey; -import static io.opentelemetry.api.common.AttributeKey.doubleArrayKey; -import static io.opentelemetry.api.common.AttributeKey.doubleKey; -import static io.opentelemetry.api.common.AttributeKey.longArrayKey; -import static io.opentelemetry.api.common.AttributeKey.longKey; -import static io.opentelemetry.api.common.AttributeKey.stringArrayKey; -import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static org.assertj.core.api.Assertions.assertThat; - -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.Message; -import com.google.protobuf.util.Durations; -import com.google.protobuf.util.Timestamps; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.SpanId; -import io.opentelemetry.api.trace.SpanKind; -import io.opentelemetry.api.trace.TraceFlags; -import io.opentelemetry.api.trace.TraceId; -import io.opentelemetry.api.trace.TraceState; -import io.opentelemetry.exporter.internal.marshal.Marshaler; -import io.opentelemetry.exporter.jaeger.proto.api_v2.Model; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.trace.TestSpanData; -import io.opentelemetry.sdk.trace.data.EventData; -import io.opentelemetry.sdk.trace.data.LinkData; -import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.sdk.trace.data.StatusData; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.TimeUnit; -import javax.annotation.Nullable; -import org.junit.jupiter.api.Test; - -class PostSpansRequestMarshalerTest { - - private static final String KEY_LOG_EVENT = "event"; - private static final String KEY_EVENT_DROPPED_ATTRIBUTES_COUNT = - "otel.event.dropped_attributes_count"; - private static final String KEY_DROPPED_ATTRIBUTES_COUNT = "otel.dropped_attributes_count"; - private static final String KEY_DROPPED_EVENTS_COUNT = "otel.dropped_events_count"; - private static final String KEY_SPAN_KIND = "span.kind"; - - private static final String LINK_TRACE_ID = "00000000000000000000000000cba123"; - private static final String LINK_SPAN_ID = "0000000000fed456"; - private static final String TRACE_ID = "00000000000000000000000000abc123"; - private static final String SPAN_ID = "0000000000def456"; - private static final String PARENT_SPAN_ID = "0000000000aef789"; - - @Test - void testProtoSpans() { - long duration = 900; // ms - long startMs = System.currentTimeMillis(); - long endMs = startMs + duration; - - SpanData span = getSpanData(startMs, endMs, SpanKind.SERVER); - List spans = Collections.singletonList(span); - 
- SpanMarshaler[] jaegerSpans = SpanMarshaler.createRepeated(spans); - - // the span contents are checked somewhere else - assertThat(jaegerSpans).hasSize(1); - } - - @Test - @SuppressWarnings({"ProtoTimestampGetSecondsGetNano", "ProtoDurationGetSecondsGetNano"}) - void testProtoSpan() { - long duration = 900; // ms - long startMs = System.currentTimeMillis(); - long endMs = startMs + duration; - - SpanData span = getSpanData(startMs, endMs, SpanKind.SERVER, 2); - - // test - Model.Span jaegerSpan = parse(Model.Span.getDefaultInstance(), SpanMarshaler.create(span)); - assertThat(TraceId.fromBytes(jaegerSpan.getTraceId().toByteArray())) - .isEqualTo(span.getTraceId()); - assertThat(SpanId.fromBytes(jaegerSpan.getSpanId().toByteArray())).isEqualTo(span.getSpanId()); - assertThat(jaegerSpan.getOperationName()).isEqualTo("GET /api/endpoint"); - assertThat(jaegerSpan.getStartTime()).isEqualTo(Timestamps.fromMillis(startMs)); - assertThat(jaegerSpan.getDuration()).isEqualTo(Durations.fromMillis(duration)); - - assertThat(jaegerSpan.getTagsCount()).isEqualTo(7); - Model.KeyValue keyValue = getValue(jaegerSpan.getTagsList(), KEY_SPAN_KIND); - assertThat(keyValue).isNotNull(); - assertThat(keyValue.getVStr()).isEqualTo("server"); - - Model.KeyValue droppedAttributes = - getValue(jaegerSpan.getTagsList(), KEY_DROPPED_ATTRIBUTES_COUNT); - assertThat(droppedAttributes) - .isEqualTo( - Model.KeyValue.newBuilder() - .setKey(KEY_DROPPED_ATTRIBUTES_COUNT) - .setVType(Model.ValueType.INT64) - .setVInt64(2) - .build()); - - assertThat(jaegerSpan.getLogsCount()).isEqualTo(1); - Model.KeyValue droppedEvents = getValue(jaegerSpan.getTagsList(), KEY_DROPPED_EVENTS_COUNT); - assertThat(droppedEvents) - .isEqualTo( - Model.KeyValue.newBuilder() - .setKey(KEY_DROPPED_EVENTS_COUNT) - .setVType(Model.ValueType.INT64) - .setVInt64(1) - .build()); - - Model.Log log = jaegerSpan.getLogs(0); - keyValue = getValue(log.getFieldsList(), KEY_LOG_EVENT); - assertThat(keyValue).isNotNull(); - assertThat(keyValue.getVStr()).isEqualTo("the log message"); - keyValue = getValue(log.getFieldsList(), "foo"); - assertThat(keyValue).isNotNull(); - assertThat(keyValue.getVStr()).isEqualTo("bar"); - - assertThat(jaegerSpan.getReferencesCount()).isEqualTo(2); - - assertHasFollowsFrom(jaegerSpan); - assertHasParent(jaegerSpan); - } - - @Test - void testProtoSpan_internal() { - long duration = 900; // ms - long startMs = System.currentTimeMillis(); - long endMs = startMs + duration; - - SpanData span = getSpanData(startMs, endMs, SpanKind.INTERNAL); - - // test - Model.Span jaegerSpan = parse(Model.Span.getDefaultInstance(), SpanMarshaler.create(span)); - Model.KeyValue keyValue = getValue(jaegerSpan.getTagsList(), KEY_SPAN_KIND); - assertThat(keyValue).isNull(); - } - - @Test - void testJaegerLogs() { - // prepare - EventData eventsData = getTimedEvent(); - - // test - LogMarshaler[] logs = LogMarshaler.createRepeated(Collections.singletonList(eventsData)); - - // verify - assertThat(logs).hasSize(1); - } - - @Test - void testJaegerLog() { - // prepare - EventData event = getTimedEvent(); - - // test - Model.Log log = parse(Model.Log.getDefaultInstance(), LogMarshaler.create(event)); - - // verify - assertThat(log.getFieldsCount()).isEqualTo(2); - - Model.KeyValue keyValue = getValue(log.getFieldsList(), KEY_LOG_EVENT); - assertThat(keyValue).isNotNull(); - assertThat(keyValue.getVStr()).isEqualTo("the log message"); - keyValue = getValue(log.getFieldsList(), "foo"); - assertThat(keyValue).isNotNull(); - 
assertThat(keyValue.getVStr()).isEqualTo("bar"); - keyValue = getValue(log.getFieldsList(), KEY_EVENT_DROPPED_ATTRIBUTES_COUNT); - assertThat(keyValue).isNull(); - - // verify dropped_attributes_count - event = getTimedEvent(3); - log = parse(Model.Log.getDefaultInstance(), LogMarshaler.create(event)); - keyValue = getValue(log.getFieldsList(), KEY_EVENT_DROPPED_ATTRIBUTES_COUNT); - assertThat(keyValue).isNotNull(); - assertThat(keyValue.getVInt64()).isEqualTo(2); - } - - @Test - void testKeyValue() { - // test - Model.KeyValue kvB = - parse( - Model.KeyValue.getDefaultInstance(), - KeyValueMarshaler.create(booleanKey("valueB"), true)); - Model.KeyValue kvD = - parse( - Model.KeyValue.getDefaultInstance(), KeyValueMarshaler.create(doubleKey("valueD"), 1.)); - Model.KeyValue kvI = - parse(Model.KeyValue.getDefaultInstance(), KeyValueMarshaler.create(longKey("valueI"), 2L)); - Model.KeyValue kvS = - parse( - Model.KeyValue.getDefaultInstance(), - KeyValueMarshaler.create(stringKey("valueS"), "foobar")); - Model.KeyValue kvArrayB = - parse( - Model.KeyValue.getDefaultInstance(), - KeyValueMarshaler.create(booleanArrayKey("valueArrayB"), Arrays.asList(true, false))); - Model.KeyValue kvArrayD = - parse( - Model.KeyValue.getDefaultInstance(), - KeyValueMarshaler.create(doubleArrayKey("valueArrayD"), Arrays.asList(1.2345, 6.789))); - Model.KeyValue kvArrayI = - parse( - Model.KeyValue.getDefaultInstance(), - KeyValueMarshaler.create(longArrayKey("valueArrayI"), Arrays.asList(12345L, 67890L))); - Model.KeyValue kvArrayS = - parse( - Model.KeyValue.getDefaultInstance(), - KeyValueMarshaler.create( - stringArrayKey("valueArrayS"), Arrays.asList("foobar", "barfoo"))); - - // verify - assertThat(kvB.getVBool()).isTrue(); - assertThat(kvB.getVType()).isEqualTo(Model.ValueType.BOOL); - assertThat(kvD.getVFloat64()).isEqualTo(1.); - assertThat(kvD.getVType()).isEqualTo(Model.ValueType.FLOAT64); - assertThat(kvI.getVInt64()).isEqualTo(2); - assertThat(kvI.getVType()).isEqualTo(Model.ValueType.INT64); - assertThat(kvS.getVStr()).isEqualTo("foobar"); - assertThat(kvS.getVStrBytes().toStringUtf8()).isEqualTo("foobar"); - assertThat(kvS.getVType()).isEqualTo(Model.ValueType.STRING); - assertThat(kvArrayB.getVStr()).isEqualTo("[true,false]"); - assertThat(kvArrayB.getVStrBytes().toStringUtf8()).isEqualTo("[true,false]"); - assertThat(kvArrayB.getVType()).isEqualTo(Model.ValueType.STRING); - assertThat(kvArrayD.getVStr()).isEqualTo("[1.2345,6.789]"); - assertThat(kvArrayD.getVStrBytes().toStringUtf8()).isEqualTo("[1.2345,6.789]"); - assertThat(kvArrayD.getVType()).isEqualTo(Model.ValueType.STRING); - assertThat(kvArrayI.getVStr()).isEqualTo("[12345,67890]"); - assertThat(kvArrayI.getVStrBytes().toStringUtf8()).isEqualTo("[12345,67890]"); - assertThat(kvArrayI.getVType()).isEqualTo(Model.ValueType.STRING); - assertThat(kvArrayS.getVStr()).isEqualTo("[\"foobar\",\"barfoo\"]"); - assertThat(kvArrayS.getVStrBytes().toStringUtf8()).isEqualTo("[\"foobar\",\"barfoo\"]"); - assertThat(kvArrayS.getVType()).isEqualTo(Model.ValueType.STRING); - } - - @Test - void testSpanRefs() { - // prepare - LinkData link = - LinkData.create(createSpanContext("00000000000000000000000000cba123", "0000000000fed456")); - - // test - List spanRefs = - SpanRefMarshaler.createRepeated(Collections.singletonList(link)); - - // verify - assertThat(spanRefs).hasSize(1); // the actual span ref is tested in another test - } - - @Test - void testSpanRef() { - // prepare - LinkData link = LinkData.create(createSpanContext(TRACE_ID, SPAN_ID)); - - 
// test - Model.SpanRef spanRef = - parse(Model.SpanRef.getDefaultInstance(), SpanRefMarshaler.create(link)); - - // verify - assertThat(SpanId.fromBytes(spanRef.getSpanId().toByteArray())).isEqualTo(SPAN_ID); - assertThat(TraceId.fromBytes(spanRef.getTraceId().toByteArray())).isEqualTo(TRACE_ID); - assertThat(spanRef.getRefType()).isEqualTo(Model.SpanRefType.FOLLOWS_FROM); - } - - @Test - void testStatusNotUnset() { - long startMs = System.currentTimeMillis(); - long endMs = startMs + 900; - SpanData span = - TestSpanData.builder() - .setHasEnded(true) - .setSpanContext(createSpanContext(TRACE_ID, SPAN_ID)) - .setName("GET /api/endpoint") - .setStartEpochNanos(TimeUnit.MILLISECONDS.toNanos(startMs)) - .setEndEpochNanos(TimeUnit.MILLISECONDS.toNanos(endMs)) - .setKind(SpanKind.SERVER) - .setStatus(StatusData.error()) - .setTotalRecordedEvents(0) - .setTotalRecordedLinks(0) - .build(); - - assertThat(SpanMarshaler.create(span)).isNotNull(); - } - - @Test - void testSpanError() { - Attributes attributes = - Attributes.of( - stringKey("error.type"), - this.getClass().getName(), - stringKey("error.message"), - "server error"); - long startMs = System.currentTimeMillis(); - long endMs = startMs + 900; - SpanData span = - TestSpanData.builder() - .setHasEnded(true) - .setSpanContext(createSpanContext(TRACE_ID, SPAN_ID)) - .setName("GET /api/endpoint") - .setStartEpochNanos(TimeUnit.MILLISECONDS.toNanos(startMs)) - .setEndEpochNanos(TimeUnit.MILLISECONDS.toNanos(endMs)) - .setKind(SpanKind.SERVER) - .setStatus(StatusData.error()) - .setAttributes(attributes) - .setTotalRecordedEvents(0) - .setTotalRecordedLinks(0) - .build(); - - Model.Span jaegerSpan = parse(Model.Span.getDefaultInstance(), SpanMarshaler.create(span)); - Model.KeyValue errorType = getValue(jaegerSpan.getTagsList(), "error.type"); - assertThat(errorType).isNotNull(); - assertThat(errorType.getVStr()).isEqualTo(this.getClass().getName()); - Model.KeyValue error = getValue(jaegerSpan.getTagsList(), "error"); - assertThat(error).isNotNull(); - assertThat(error.getVBool()).isTrue(); - } - - private static EventData getTimedEvent() { - return getTimedEvent(-1); - } - - private static EventData getTimedEvent(int totalAttributeCount) { - long epochNanos = TimeUnit.MILLISECONDS.toNanos(System.currentTimeMillis()); - Attributes attributes = Attributes.of(stringKey("foo"), "bar"); - if (totalAttributeCount <= 0) { - totalAttributeCount = attributes.size(); - } - return EventData.create(epochNanos, "the log message", attributes, totalAttributeCount); - } - - private static SpanData getSpanData(long startMs, long endMs, SpanKind kind) { - return getSpanData(startMs, endMs, kind, 1); - } - - private static SpanData getSpanData( - long startMs, long endMs, SpanKind kind, int totalRecordedEvents) { - Attributes attributes = Attributes.of(booleanKey("valueB"), true); - - LinkData link = LinkData.create(createSpanContext(LINK_TRACE_ID, LINK_SPAN_ID), attributes); - - return TestSpanData.builder() - .setHasEnded(true) - .setSpanContext(createSpanContext(TRACE_ID, SPAN_ID)) - .setParentSpanContext( - SpanContext.create( - TRACE_ID, PARENT_SPAN_ID, TraceFlags.getDefault(), TraceState.getDefault())) - .setName("GET /api/endpoint") - .setStartEpochNanos(TimeUnit.MILLISECONDS.toNanos(startMs)) - .setEndEpochNanos(TimeUnit.MILLISECONDS.toNanos(endMs)) - .setAttributes(Attributes.of(booleanKey("valueB"), true)) - .setTotalAttributeCount(3) - .setEvents(Collections.singletonList(getTimedEvent())) - .setTotalRecordedEvents(totalRecordedEvents) - 
.setLinks(Collections.singletonList(link)) - .setTotalRecordedLinks(1) - .setKind(kind) - .setResource(Resource.create(Attributes.empty())) - .setStatus(StatusData.ok()) - .build(); - } - - private static SpanContext createSpanContext(String traceId, String spanId) { - return SpanContext.create(traceId, spanId, TraceFlags.getSampled(), TraceState.getDefault()); - } - - @Nullable - private static Model.KeyValue getValue(List tagsList, String s) { - for (Model.KeyValue kv : tagsList) { - if (kv.getKey().equals(s)) { - return kv; - } - } - return null; - } - - private static void assertHasFollowsFrom(Model.Span jaegerSpan) { - boolean found = false; - for (Model.SpanRef spanRef : jaegerSpan.getReferencesList()) { - if (Model.SpanRefType.FOLLOWS_FROM.equals(spanRef.getRefType())) { - assertThat(TraceId.fromBytes(spanRef.getTraceId().toByteArray())).isEqualTo(LINK_TRACE_ID); - assertThat(SpanId.fromBytes(spanRef.getSpanId().toByteArray())).isEqualTo(LINK_SPAN_ID); - found = true; - } - } - assertThat(found).withFailMessage("Should have found the follows-from reference").isTrue(); - } - - private static void assertHasParent(Model.Span jaegerSpan) { - boolean found = false; - for (Model.SpanRef spanRef : jaegerSpan.getReferencesList()) { - if (Model.SpanRefType.CHILD_OF.equals(spanRef.getRefType())) { - assertThat(TraceId.fromBytes(spanRef.getTraceId().toByteArray())).isEqualTo(TRACE_ID); - assertThat(SpanId.fromBytes(spanRef.getSpanId().toByteArray())).isEqualTo(PARENT_SPAN_ID); - found = true; - } - } - assertThat(found).withFailMessage("Should have found the parent reference").isTrue(); - } - - @SuppressWarnings("unchecked") - private static T parse(T prototype, Marshaler marshaler) { - byte[] serialized = toByteArray(marshaler); - T result; - try { - result = (T) prototype.newBuilderForType().mergeFrom(serialized).build(); - } catch (InvalidProtocolBufferException e) { - throw new UncheckedIOException(e); - } - // Our marshaler should produce the exact same length of serialized output (for example, field - // default values are not outputted), so we check that here. The output itself may have slightly - // different ordering, mostly due to the way we don't output oneof values in field order all the - // tieme. If the lengths are equal and the resulting protos are equal, the marshaling is - // guaranteed to be valid. 
- assertThat(result.getSerializedSize()).isEqualTo(serialized.length); - return result; - } - - private static byte[] toByteArray(Marshaler marshaler) { - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - try { - marshaler.writeBinaryTo(bos); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - return bos.toByteArray(); - } -} diff --git a/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/internal/JaegerGrpcSpanExporterProviderTest.java b/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/internal/JaegerGrpcSpanExporterProviderTest.java deleted file mode 100644 index 82ba56a2ebe..00000000000 --- a/exporters/jaeger/src/test/java/io/opentelemetry/exporter/jaeger/internal/JaegerGrpcSpanExporterProviderTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.jaeger.internal; - -import static org.assertj.core.api.Assertions.assertThat; - -import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; -import io.opentelemetry.sdk.trace.export.SpanExporter; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import okhttp3.HttpUrl; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("deprecation") // Testing deprecated code -class JaegerGrpcSpanExporterProviderTest { - - private static final JaegerGrpcSpanExporterProvider provider = - new JaegerGrpcSpanExporterProvider(); - - @Test - void getName() { - assertThat(provider.getName()).isEqualTo("jaeger"); - } - - @Test - void createExporter_Default() { - try (SpanExporter spanExporter = - provider.createExporter(DefaultConfigProperties.createFromMap(Collections.emptyMap()))) { - assertThat(spanExporter) - .isInstanceOf(io.opentelemetry.exporter.jaeger.JaegerGrpcSpanExporter.class); - assertThat(spanExporter) - .extracting("delegate.grpcSender") - .extracting("client") - .extracting("callTimeoutMillis") - .isEqualTo(10000); - assertThat(spanExporter) - .extracting("delegate.grpcSender") - .extracting("url") - .isEqualTo( - HttpUrl.get("http://localhost:14250/jaeger.api_v2.CollectorService/PostSpans")); - } - } - - @Test - void createExporter_WithConfiguration() { - Map config = new HashMap<>(); - config.put("otel.exporter.jaeger.endpoint", "http://endpoint:8080"); - config.put("otel.exporter.jaeger.timeout", "1s"); - - try (SpanExporter spanExporter = - provider.createExporter(DefaultConfigProperties.createFromMap(config))) { - assertThat(spanExporter) - .isInstanceOf(io.opentelemetry.exporter.jaeger.JaegerGrpcSpanExporter.class); - assertThat(spanExporter) - .extracting("delegate.grpcSender") - .extracting("client") - .extracting("callTimeoutMillis") - .isEqualTo(1000); - assertThat(spanExporter) - .extracting("delegate.grpcSender") - .extracting("url") - .isEqualTo(HttpUrl.get("http://endpoint:8080/jaeger.api_v2.CollectorService/PostSpans")); - } - } -} diff --git a/exporters/jaeger/src/test/resources/logback-test.xml b/exporters/jaeger/src/test/resources/logback-test.xml deleted file mode 100644 index 0f157506f5e..00000000000 --- a/exporters/jaeger/src/test/resources/logback-test.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - %d{HH:mm:ss.SSS} %-5level %logger - %msg%n - - - - - - - \ No newline at end of file diff --git a/exporters/logging-otlp/build.gradle.kts b/exporters/logging-otlp/build.gradle.kts index 07a2044b5f3..0870f2c9937 100644 --- a/exporters/logging-otlp/build.gradle.kts +++ b/exporters/logging-otlp/build.gradle.kts @@ 
-14,11 +14,14 @@ dependencies { implementation(project(":sdk:logs")) implementation(project(":exporters:otlp:common")) + compileOnly(project(":api:incubator")) implementation(project(":sdk-extensions:autoconfigure-spi")) implementation("com.fasterxml.jackson.core:jackson-core") + testImplementation(project(":api:incubator")) testImplementation(project(":sdk:testing")) + testImplementation("com.google.guava:guava") testImplementation("org.skyscreamer:jsonassert") } diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingLogRecordExporter.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingLogRecordExporter.java index c5b7d75cbe5..0c00cca908e 100644 --- a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingLogRecordExporter.java +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingLogRecordExporter.java @@ -5,18 +5,12 @@ package io.opentelemetry.exporter.logging.otlp; -import static io.opentelemetry.exporter.logging.otlp.JsonUtil.JSON_FACTORY; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.io.SegmentedStringWriter; -import io.opentelemetry.exporter.internal.otlp.logs.ResourceLogsMarshaler; +import io.opentelemetry.exporter.logging.otlp.internal.logs.OtlpStdoutLogRecordExporter; +import io.opentelemetry.exporter.logging.otlp.internal.logs.OtlpStdoutLogRecordExporterBuilder; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.logs.data.LogRecordData; import io.opentelemetry.sdk.logs.export.LogRecordExporter; -import java.io.IOException; import java.util.Collection; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.logging.Level; import java.util.logging.Logger; /** @@ -30,49 +24,31 @@ public final class OtlpJsonLoggingLogRecordExporter implements LogRecordExporter private static final Logger logger = Logger.getLogger(OtlpJsonLoggingLogRecordExporter.class.getName()); - private final AtomicBoolean isShutdown = new AtomicBoolean(); + private final OtlpStdoutLogRecordExporter delegate; /** Returns a new {@link OtlpJsonLoggingLogRecordExporter}. */ public static LogRecordExporter create() { - return new OtlpJsonLoggingLogRecordExporter(); + OtlpStdoutLogRecordExporter delegate = + new OtlpStdoutLogRecordExporterBuilder(logger).setWrapperJsonObject(false).build(); + return new OtlpJsonLoggingLogRecordExporter(delegate); } - private OtlpJsonLoggingLogRecordExporter() {} + OtlpJsonLoggingLogRecordExporter(OtlpStdoutLogRecordExporter delegate) { + this.delegate = delegate; + } @Override public CompletableResultCode export(Collection logs) { - if (isShutdown.get()) { - return CompletableResultCode.ofFailure(); - } - - ResourceLogsMarshaler[] allResourceLogs = ResourceLogsMarshaler.create(logs); - for (ResourceLogsMarshaler resourceLogs : allResourceLogs) { - SegmentedStringWriter sw = new SegmentedStringWriter(JSON_FACTORY._getBufferRecycler()); - try (JsonGenerator gen = JsonUtil.create(sw)) { - resourceLogs.writeJsonTo(gen); - } catch (IOException e) { - // Shouldn't happen in practice, just skip it. 
- continue; - } - try { - logger.log(Level.INFO, sw.getAndClear()); - } catch (IOException e) { - logger.log(Level.WARNING, "Unable to read OTLP JSON log records", e); - } - } - return CompletableResultCode.ofSuccess(); + return delegate.export(logs); } @Override public CompletableResultCode flush() { - return CompletableResultCode.ofSuccess(); + return delegate.flush(); } @Override public CompletableResultCode shutdown() { - if (!isShutdown.compareAndSet(false, true)) { - logger.log(Level.INFO, "Calling shutdown() multiple times."); - } - return CompletableResultCode.ofSuccess(); + return delegate.shutdown(); } } diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingMetricExporter.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingMetricExporter.java index fd0a491bdfb..b42ef4acab7 100644 --- a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingMetricExporter.java +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingMetricExporter.java @@ -5,24 +5,18 @@ package io.opentelemetry.exporter.logging.otlp; -import static io.opentelemetry.exporter.logging.otlp.JsonUtil.JSON_FACTORY; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.io.SegmentedStringWriter; -import io.opentelemetry.exporter.internal.otlp.metrics.ResourceMetricsMarshaler; +import io.opentelemetry.exporter.logging.otlp.internal.metrics.OtlpStdoutMetricExporter; +import io.opentelemetry.exporter.logging.otlp.internal.metrics.OtlpStdoutMetricExporterBuilder; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.MetricExporter; -import java.io.IOException; import java.util.Collection; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.logging.Level; import java.util.logging.Logger; /** - * A {@link MetricExporter} which writes {@linkplain MetricData spans} to a {@link Logger} in OTLP + * A {@link MetricExporter} which writes {@linkplain MetricData metrics} to a {@link Logger} in OTLP * JSON format. Each log line will include a single {@code ResourceMetrics}. */ public final class OtlpJsonLoggingMetricExporter implements MetricExporter { @@ -30,16 +24,16 @@ public final class OtlpJsonLoggingMetricExporter implements MetricExporter { private static final Logger logger = Logger.getLogger(OtlpJsonLoggingMetricExporter.class.getName()); - private final AtomicBoolean isShutdown = new AtomicBoolean(); - private final AggregationTemporality aggregationTemporality; + private final OtlpStdoutMetricExporter delegate; + /** * Returns a new {@link OtlpJsonLoggingMetricExporter} with a aggregation temporality of {@link * AggregationTemporality#CUMULATIVE}. */ public static MetricExporter create() { - return new OtlpJsonLoggingMetricExporter(AggregationTemporality.CUMULATIVE); + return create(AggregationTemporality.CUMULATIVE); } /** @@ -47,13 +41,32 @@ public static MetricExporter create() { * aggregationTemporality}. 
*/ public static MetricExporter create(AggregationTemporality aggregationTemporality) { - return new OtlpJsonLoggingMetricExporter(aggregationTemporality); + OtlpStdoutMetricExporter delegate = + new OtlpStdoutMetricExporterBuilder(logger).setWrapperJsonObject(false).build(); + return new OtlpJsonLoggingMetricExporter(delegate, aggregationTemporality); } - private OtlpJsonLoggingMetricExporter(AggregationTemporality aggregationTemporality) { + OtlpJsonLoggingMetricExporter( + OtlpStdoutMetricExporter delegate, AggregationTemporality aggregationTemporality) { + this.delegate = delegate; this.aggregationTemporality = aggregationTemporality; } + @Override + public CompletableResultCode export(Collection logs) { + return delegate.export(logs); + } + + @Override + public CompletableResultCode flush() { + return delegate.flush(); + } + + @Override + public CompletableResultCode shutdown() { + return delegate.shutdown(); + } + /** * Return the aggregation temporality. * @@ -68,41 +81,4 @@ public AggregationTemporality getPreferredTemporality() { public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { return aggregationTemporality; } - - @Override - public CompletableResultCode export(Collection metrics) { - if (isShutdown.get()) { - return CompletableResultCode.ofFailure(); - } - - ResourceMetricsMarshaler[] allResourceMetrics = ResourceMetricsMarshaler.create(metrics); - for (ResourceMetricsMarshaler resourceMetrics : allResourceMetrics) { - SegmentedStringWriter sw = new SegmentedStringWriter(JSON_FACTORY._getBufferRecycler()); - try (JsonGenerator gen = JsonUtil.create(sw)) { - resourceMetrics.writeJsonTo(gen); - } catch (IOException e) { - // Shouldn't happen in practice, just skip it. - continue; - } - try { - logger.log(Level.INFO, sw.getAndClear()); - } catch (IOException e) { - logger.log(Level.WARNING, "Unable to read OTLP JSON metrics", e); - } - } - return CompletableResultCode.ofSuccess(); - } - - @Override - public CompletableResultCode flush() { - return CompletableResultCode.ofSuccess(); - } - - @Override - public CompletableResultCode shutdown() { - if (!isShutdown.compareAndSet(false, true)) { - logger.log(Level.INFO, "Calling shutdown() multiple times."); - } - return CompletableResultCode.ofSuccess(); - } } diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingSpanExporter.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingSpanExporter.java index c57c9004d56..63901351326 100644 --- a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingSpanExporter.java +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingSpanExporter.java @@ -5,16 +5,12 @@ package io.opentelemetry.exporter.logging.otlp; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.io.SegmentedStringWriter; -import io.opentelemetry.exporter.internal.otlp.traces.ResourceSpansMarshaler; +import io.opentelemetry.exporter.logging.otlp.internal.traces.OtlpStdoutSpanExporter; +import io.opentelemetry.exporter.logging.otlp.internal.traces.OtlpStdoutSpanExporterBuilder; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.export.SpanExporter; -import java.io.IOException; import java.util.Collection; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.logging.Level; import 
java.util.logging.Logger; /** @@ -26,50 +22,31 @@ public final class OtlpJsonLoggingSpanExporter implements SpanExporter { private static final Logger logger = Logger.getLogger(OtlpJsonLoggingSpanExporter.class.getName()); - private final AtomicBoolean isShutdown = new AtomicBoolean(); + private final OtlpStdoutSpanExporter delegate; /** Returns a new {@link OtlpJsonLoggingSpanExporter}. */ public static SpanExporter create() { - return new OtlpJsonLoggingSpanExporter(); + OtlpStdoutSpanExporter delegate = + new OtlpStdoutSpanExporterBuilder(logger).setWrapperJsonObject(false).build(); + return new OtlpJsonLoggingSpanExporter(delegate); } - private OtlpJsonLoggingSpanExporter() {} + OtlpJsonLoggingSpanExporter(OtlpStdoutSpanExporter delegate) { + this.delegate = delegate; + } @Override - public CompletableResultCode export(Collection spans) { - if (isShutdown.get()) { - return CompletableResultCode.ofFailure(); - } - - ResourceSpansMarshaler[] allResourceSpans = ResourceSpansMarshaler.create(spans); - for (ResourceSpansMarshaler resourceSpans : allResourceSpans) { - SegmentedStringWriter sw = - new SegmentedStringWriter(JsonUtil.JSON_FACTORY._getBufferRecycler()); - try (JsonGenerator gen = JsonUtil.create(sw)) { - resourceSpans.writeJsonTo(gen); - } catch (IOException e) { - // Shouldn't happen in practice, just skip it. - continue; - } - try { - logger.log(Level.INFO, sw.getAndClear()); - } catch (IOException e) { - logger.log(Level.WARNING, "Unable to read OTLP JSON spans", e); - } - } - return CompletableResultCode.ofSuccess(); + public CompletableResultCode export(Collection logs) { + return delegate.export(logs); } @Override public CompletableResultCode flush() { - return CompletableResultCode.ofSuccess(); + return delegate.flush(); } @Override public CompletableResultCode shutdown() { - if (!isShutdown.compareAndSet(false, true)) { - logger.log(Level.INFO, "Calling shutdown() multiple times."); - } - return CompletableResultCode.ofSuccess(); + return delegate.shutdown(); } } diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingLogRecordExporterProvider.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/LoggingLogRecordExporterProvider.java similarity index 82% rename from exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingLogRecordExporterProvider.java rename to exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/LoggingLogRecordExporterProvider.java index ebb0d2d0865..5e038f3c892 100644 --- a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingLogRecordExporterProvider.java +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/LoggingLogRecordExporterProvider.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.exporter.logging.otlp.internal; +package io.opentelemetry.exporter.logging.otlp.internal.logs; import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingLogRecordExporter; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; @@ -16,7 +16,9 @@ *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. */ -public class LoggingLogRecordExporterProvider implements ConfigurableLogRecordExporterProvider { +public final class LoggingLogRecordExporterProvider + implements ConfigurableLogRecordExporterProvider { + @Override public LogRecordExporter createExporter(ConfigProperties config) { return OtlpJsonLoggingLogRecordExporter.create(); diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporter.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporter.java new file mode 100644 index 00000000000..11798e938f0 --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporter.java @@ -0,0 +1,110 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.logs; + +import io.opentelemetry.exporter.internal.otlp.logs.LogReusableDataMarshaler; +import io.opentelemetry.exporter.internal.otlp.logs.ResourceLogsMarshaler; +import io.opentelemetry.exporter.logging.otlp.internal.writer.JsonWriter; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; +import java.util.Collection; +import java.util.StringJoiner; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * Exporter for sending OTLP log records to stdout. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutLogRecordExporter implements LogRecordExporter { + + private static final Logger LOGGER = + Logger.getLogger(OtlpStdoutLogRecordExporter.class.getName()); + + private final AtomicBoolean isShutdown = new AtomicBoolean(); + + private final Logger logger; + private final JsonWriter jsonWriter; + private final boolean wrapperJsonObject; + private final MemoryMode memoryMode; + private final Function, CompletableResultCode> marshaler; + + OtlpStdoutLogRecordExporter( + Logger logger, JsonWriter jsonWriter, boolean wrapperJsonObject, MemoryMode memoryMode) { + this.logger = logger; + this.jsonWriter = jsonWriter; + this.wrapperJsonObject = wrapperJsonObject; + this.memoryMode = memoryMode; + marshaler = createMarshaler(jsonWriter, memoryMode, wrapperJsonObject); + } + + /** Returns a new {@link OtlpStdoutLogRecordExporterBuilder}. */ + @SuppressWarnings("SystemOut") + public static OtlpStdoutLogRecordExporterBuilder builder() { + return new OtlpStdoutLogRecordExporterBuilder(LOGGER).setOutput(System.out); + } + + private static Function, CompletableResultCode> createMarshaler( + JsonWriter jsonWriter, MemoryMode memoryMode, boolean wrapperJsonObject) { + if (wrapperJsonObject) { + LogReusableDataMarshaler reusableDataMarshaler = + new LogReusableDataMarshaler( + memoryMode, (marshaler, numItems) -> jsonWriter.write(marshaler)); + return reusableDataMarshaler::export; + } else { + return logs -> { + // no support for low allocation marshaler + for (ResourceLogsMarshaler marshaler : ResourceLogsMarshaler.create(logs)) { + CompletableResultCode resultCode = jsonWriter.write(marshaler); + if (!resultCode.isSuccess()) { + // already logged + return resultCode; + } + } + return CompletableResultCode.ofSuccess(); + }; + } + } + + @Override + public CompletableResultCode export(Collection logs) { + if (isShutdown.get()) { + return CompletableResultCode.ofFailure(); + } + + return marshaler.apply(logs); + } + + @Override + public CompletableResultCode flush() { + return jsonWriter.flush(); + } + + @Override + public CompletableResultCode shutdown() { + if (!isShutdown.compareAndSet(false, true)) { + logger.log(Level.INFO, "Calling shutdown() multiple times."); + } else { + jsonWriter.close(); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public String toString() { + StringJoiner joiner = new StringJoiner(", ", "OtlpStdoutLogRecordExporter{", "}"); + joiner.add("jsonWriter=" + jsonWriter); + joiner.add("wrapperJsonObject=" + wrapperJsonObject); + joiner.add("memoryMode=" + memoryMode); + return joiner.toString(); + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporterBuilder.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporterBuilder.java new file mode 100644 index 00000000000..76e6adb20ad --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporterBuilder.java @@ -0,0 +1,93 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.logs; + +import static java.util.Objects.requireNonNull; + +import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingLogRecordExporter; +import 
io.opentelemetry.exporter.logging.otlp.internal.writer.JsonWriter; +import io.opentelemetry.exporter.logging.otlp.internal.writer.LoggerJsonWriter; +import io.opentelemetry.exporter.logging.otlp.internal.writer.StreamJsonWriter; +import io.opentelemetry.sdk.common.export.MemoryMode; +import java.io.OutputStream; +import java.util.logging.Logger; + +/** + * Builder for {@link OtlpJsonLoggingLogRecordExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutLogRecordExporterBuilder { + + private static final String TYPE = "log records"; + + private final Logger logger; + private JsonWriter jsonWriter; + private boolean wrapperJsonObject = true; + private MemoryMode memoryMode = MemoryMode.IMMUTABLE_DATA; + + public OtlpStdoutLogRecordExporterBuilder(Logger logger) { + this.logger = logger; + this.jsonWriter = new LoggerJsonWriter(logger, TYPE); + } + + /** + * Sets the exporter to use the specified JSON object wrapper. + * + * @param wrapperJsonObject whether to wrap the JSON object in an outer JSON "resourceLogs" + * object. + */ + public OtlpStdoutLogRecordExporterBuilder setWrapperJsonObject(boolean wrapperJsonObject) { + this.wrapperJsonObject = wrapperJsonObject; + return this; + } + + /** + * Set the {@link MemoryMode}. If unset, defaults to {@link MemoryMode#IMMUTABLE_DATA}. + * + *
<p>
When memory mode is {@link MemoryMode#REUSABLE_DATA}, serialization is optimized to reduce + * memory allocation. + */ + public OtlpStdoutLogRecordExporterBuilder setMemoryMode(MemoryMode memoryMode) { + this.memoryMode = memoryMode; + return this; + } + + /** + * Sets the exporter to use the specified output stream. + * + *
<p>
The output stream will be closed when {@link OtlpStdoutLogRecordExporter#shutdown()} is + * called unless it's {@link System#out} or {@link System#err}. + * + * @param outputStream the output stream to use. + */ + public OtlpStdoutLogRecordExporterBuilder setOutput(OutputStream outputStream) { + requireNonNull(outputStream, "outputStream"); + this.jsonWriter = new StreamJsonWriter(outputStream, TYPE); + return this; + } + + /** Sets the exporter to use the specified logger. */ + public OtlpStdoutLogRecordExporterBuilder setOutput(Logger logger) { + requireNonNull(logger, "logger"); + this.jsonWriter = new LoggerJsonWriter(logger, TYPE); + return this; + } + + /** + * Constructs a new instance of the exporter based on the builder's values. + * + * @return a new exporter's instance + */ + public OtlpStdoutLogRecordExporter build() { + if (memoryMode == MemoryMode.REUSABLE_DATA && !wrapperJsonObject) { + throw new IllegalArgumentException( + "Reusable data mode is not supported without wrapperJsonObject"); + } + return new OtlpStdoutLogRecordExporter(logger, jsonWriter, wrapperJsonObject, memoryMode); + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporterComponentProvider.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporterComponentProvider.java new file mode 100644 index 00000000000..57817346d71 --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporterComponentProvider.java @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.logs; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.internal.IncubatingExporterBuilderUtil; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; + +/** + * Declarative configuration SPI implementation for {@link OtlpStdoutLogRecordExporter}. + * + *
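For orientation, a minimal usage sketch of the OtlpStdoutLogRecordExporter and its builder introduced above. The exporter, builder, and setter names come from this diff; the SDK wiring (SdkLoggerProvider, SimpleLogRecordProcessor) is illustrative and not part of the change.

import io.opentelemetry.exporter.logging.otlp.internal.logs.OtlpStdoutLogRecordExporter;
import io.opentelemetry.sdk.common.export.MemoryMode;
import io.opentelemetry.sdk.logs.SdkLoggerProvider;
import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor;

class OtlpStdoutLogRecordExporterUsageSketch {
  static SdkLoggerProvider create() {
    // builder() defaults to writing to System.out; setOutput(...) can redirect to another
    // stream or to a java.util.logging.Logger.
    OtlpStdoutLogRecordExporter exporter =
        OtlpStdoutLogRecordExporter.builder()
            .setWrapperJsonObject(true) // wrap each export in an outer "resourceLogs" JSON object
            .setMemoryMode(MemoryMode.REUSABLE_DATA) // only valid when wrapperJsonObject is true
            .build();
    return SdkLoggerProvider.builder()
        .addLogRecordProcessor(SimpleLogRecordProcessor.create(exporter))
        .build();
  }
}

Per the builder contract above, shutdown() closes the configured stream unless it is System.out or System.err.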
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutLogRecordExporterComponentProvider + implements ComponentProvider { + + @Override + public Class getType() { + return LogRecordExporter.class; + } + + @Override + public String getName() { + return "experimental-otlp/stdout"; + } + + @Override + public LogRecordExporter create(DeclarativeConfigProperties config) { + OtlpStdoutLogRecordExporterBuilder builder = OtlpStdoutLogRecordExporter.builder(); + IncubatingExporterBuilderUtil.configureExporterMemoryMode(config, builder::setMemoryMode); + return builder.build(); + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporterProvider.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporterProvider.java new file mode 100644 index 00000000000..08b8021590d --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/logs/OtlpStdoutLogRecordExporterProvider.java @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.logs; + +import io.opentelemetry.exporter.internal.ExporterBuilderUtil; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; + +/** + * {@link LogRecordExporter} SPI implementation for {@link OtlpStdoutLogRecordExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutLogRecordExporterProvider + implements ConfigurableLogRecordExporterProvider { + @Override + public LogRecordExporter createExporter(ConfigProperties config) { + OtlpStdoutLogRecordExporterBuilder builder = OtlpStdoutLogRecordExporter.builder(); + ExporterBuilderUtil.configureExporterMemoryMode(config, builder::setMemoryMode); + return builder.build(); + } + + @Override + public String getName() { + return "experimental-otlp/stdout"; + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingMetricExporterProvider.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/LoggingMetricExporterProvider.java similarity index 83% rename from exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingMetricExporterProvider.java rename to exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/LoggingMetricExporterProvider.java index b5669b5426a..6748bbd99d1 100644 --- a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingMetricExporterProvider.java +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/LoggingMetricExporterProvider.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.exporter.logging.otlp.internal; +package io.opentelemetry.exporter.logging.otlp.internal.metrics; import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingMetricExporter; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; @@ -16,7 +16,8 @@ *
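OtlpStdoutLogRecordExporterProvider above registers under the name experimental-otlp/stdout, so the exporter can also be selected through SDK autoconfiguration. A hedged sketch, assuming the standard otel.logs.exporter property is what selects log record exporters:

import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;
import java.util.Collections;

class StdoutLogExporterAutoConfigureSketch {
  static AutoConfiguredOpenTelemetrySdk create() {
    return AutoConfiguredOpenTelemetrySdk.builder()
        // matches the value returned by OtlpStdoutLogRecordExporterProvider.getName()
        .addPropertiesSupplier(
            () -> Collections.singletonMap("otel.logs.exporter", "experimental-otlp/stdout"))
        .build();
  }
}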
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. */ -public class LoggingMetricExporterProvider implements ConfigurableMetricExporterProvider { +public final class LoggingMetricExporterProvider implements ConfigurableMetricExporterProvider { + @Override public MetricExporter createExporter(ConfigProperties config) { return OtlpJsonLoggingMetricExporter.create(); diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporter.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporter.java new file mode 100644 index 00000000000..4294c1d2571 --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporter.java @@ -0,0 +1,144 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.metrics; + +import io.opentelemetry.exporter.internal.otlp.metrics.MetricReusableDataMarshaler; +import io.opentelemetry.exporter.internal.otlp.metrics.ResourceMetricsMarshaler; +import io.opentelemetry.exporter.logging.otlp.internal.writer.JsonWriter; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import java.util.Collection; +import java.util.StringJoiner; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * Exporter for sending OTLP metrics to stdout. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutMetricExporter implements MetricExporter { + + private static final Logger LOGGER = Logger.getLogger(OtlpStdoutMetricExporter.class.getName()); + + private final AtomicBoolean isShutdown = new AtomicBoolean(); + + private final Logger logger; + private final JsonWriter jsonWriter; + private final boolean wrapperJsonObject; + private final MemoryMode memoryMode; + private final Function, CompletableResultCode> marshaler; + private final AggregationTemporalitySelector aggregationTemporalitySelector; + private final DefaultAggregationSelector defaultAggregationSelector; + + OtlpStdoutMetricExporter( + Logger logger, + JsonWriter jsonWriter, + boolean wrapperJsonObject, + MemoryMode memoryMode, + AggregationTemporalitySelector aggregationTemporalitySelector, + DefaultAggregationSelector defaultAggregationSelector) { + this.logger = logger; + this.jsonWriter = jsonWriter; + this.wrapperJsonObject = wrapperJsonObject; + this.memoryMode = memoryMode; + this.aggregationTemporalitySelector = aggregationTemporalitySelector; + this.defaultAggregationSelector = defaultAggregationSelector; + marshaler = createMarshaler(jsonWriter, memoryMode, wrapperJsonObject); + } + + /** Returns a new {@link OtlpStdoutMetricExporterBuilder}. */ + @SuppressWarnings("SystemOut") + public static OtlpStdoutMetricExporterBuilder builder() { + return new OtlpStdoutMetricExporterBuilder(LOGGER).setOutput(System.out); + } + + private static Function, CompletableResultCode> createMarshaler( + JsonWriter jsonWriter, MemoryMode memoryMode, boolean wrapperJsonObject) { + if (wrapperJsonObject) { + MetricReusableDataMarshaler reusableDataMarshaler = + new MetricReusableDataMarshaler( + memoryMode, (marshaler, numItems) -> jsonWriter.write(marshaler)); + return reusableDataMarshaler::export; + } else { + return metrics -> { + // no support for low allocation marshaler + for (ResourceMetricsMarshaler marshaler : ResourceMetricsMarshaler.create(metrics)) { + CompletableResultCode resultCode = jsonWriter.write(marshaler); + if (!resultCode.isSuccess()) { + // already logged + return resultCode; + } + } + return CompletableResultCode.ofSuccess(); + }; + } + } + + @Override + public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { + return aggregationTemporalitySelector.getAggregationTemporality(instrumentType); + } + + @Override + public Aggregation getDefaultAggregation(InstrumentType instrumentType) { + return defaultAggregationSelector.getDefaultAggregation(instrumentType); + } + + @Override + public MemoryMode getMemoryMode() { + return memoryMode; + } + + @Override + public CompletableResultCode export(Collection metrics) { + if (isShutdown.get()) { + return CompletableResultCode.ofFailure(); + } + + return marshaler.apply(metrics); + } + + @Override + public CompletableResultCode flush() { + return jsonWriter.flush(); + } + + @Override + public CompletableResultCode shutdown() { + if (!isShutdown.compareAndSet(false, true)) { + logger.log(Level.INFO, "Calling shutdown() multiple times."); + } else { + jsonWriter.close(); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public String toString() { + StringJoiner joiner = new StringJoiner(", ", "OtlpStdoutMetricExporter{", "}"); + joiner.add("jsonWriter=" + jsonWriter); + joiner.add("wrapperJsonObject=" + wrapperJsonObject); + joiner.add("memoryMode=" + memoryMode); + joiner.add( + 
"aggregationTemporalitySelector=" + + AggregationTemporalitySelector.asString(aggregationTemporalitySelector)); + joiner.add( + "defaultAggregationSelector=" + + DefaultAggregationSelector.asString(defaultAggregationSelector)); + return joiner.toString(); + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporterBuilder.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporterBuilder.java new file mode 100644 index 00000000000..945ffd778bd --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporterBuilder.java @@ -0,0 +1,142 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.metrics; + +import static java.util.Objects.requireNonNull; + +import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingMetricExporter; +import io.opentelemetry.exporter.logging.otlp.internal.writer.JsonWriter; +import io.opentelemetry.exporter.logging.otlp.internal.writer.LoggerJsonWriter; +import io.opentelemetry.exporter.logging.otlp.internal.writer.StreamJsonWriter; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import java.io.OutputStream; +import java.util.logging.Logger; + +/** + * Builder for {@link OtlpJsonLoggingMetricExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutMetricExporterBuilder { + + private static final String TYPE = "metrics"; + + private static final AggregationTemporalitySelector DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR = + AggregationTemporalitySelector.alwaysCumulative(); + + private AggregationTemporalitySelector aggregationTemporalitySelector = + DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR; + + private DefaultAggregationSelector defaultAggregationSelector = + DefaultAggregationSelector.getDefault(); + + private final Logger logger; + private JsonWriter jsonWriter; + private boolean wrapperJsonObject = true; + private MemoryMode memoryMode = MemoryMode.IMMUTABLE_DATA; + + public OtlpStdoutMetricExporterBuilder(Logger logger) { + this.logger = logger; + this.jsonWriter = new LoggerJsonWriter(logger, TYPE); + } + + /** + * Sets the exporter to use the specified JSON object wrapper. + * + * @param wrapperJsonObject whether to wrap the JSON object in an outer JSON "resourceMetrics" + * object. + */ + public OtlpStdoutMetricExporterBuilder setWrapperJsonObject(boolean wrapperJsonObject) { + this.wrapperJsonObject = wrapperJsonObject; + return this; + } + + /** + * Set the {@link MemoryMode}. If unset, defaults to {@link MemoryMode#IMMUTABLE_DATA}. + * + *
<p>
When memory mode is {@link MemoryMode#REUSABLE_DATA}, serialization is optimized to reduce + * memory allocation. + */ + public OtlpStdoutMetricExporterBuilder setMemoryMode(MemoryMode memoryMode) { + this.memoryMode = memoryMode; + return this; + } + + /** + * Sets the exporter to use the specified output stream. + * + *
<p>
The output stream will be closed when {@link OtlpStdoutMetricExporter#shutdown()} is called + * unless it's {@link System#out} or {@link System#err}. + * + * @param outputStream the output stream to use. + */ + public OtlpStdoutMetricExporterBuilder setOutput(OutputStream outputStream) { + requireNonNull(outputStream, "outputStream"); + this.jsonWriter = new StreamJsonWriter(outputStream, TYPE); + return this; + } + + /** Sets the exporter to use the specified logger. */ + public OtlpStdoutMetricExporterBuilder setOutput(Logger logger) { + requireNonNull(logger, "logger"); + this.jsonWriter = new LoggerJsonWriter(logger, TYPE); + return this; + } + + /** + * Set the {@link AggregationTemporalitySelector} used for {@link + * MetricExporter#getAggregationTemporality(InstrumentType)}. + * + *
<p>
If unset, defaults to {@link AggregationTemporalitySelector#alwaysCumulative()}. + * + *
<p>
{@link AggregationTemporalitySelector#deltaPreferred()} is a common configuration for delta + * backends. + */ + public OtlpStdoutMetricExporterBuilder setAggregationTemporalitySelector( + AggregationTemporalitySelector aggregationTemporalitySelector) { + requireNonNull(aggregationTemporalitySelector, "aggregationTemporalitySelector"); + this.aggregationTemporalitySelector = aggregationTemporalitySelector; + return this; + } + + /** + * Set the {@link DefaultAggregationSelector} used for {@link + * MetricExporter#getDefaultAggregation(InstrumentType)}. + * + *
<p>
If unset, defaults to {@link DefaultAggregationSelector#getDefault()}. + */ + public OtlpStdoutMetricExporterBuilder setDefaultAggregationSelector( + DefaultAggregationSelector defaultAggregationSelector) { + requireNonNull(defaultAggregationSelector, "defaultAggregationSelector"); + this.defaultAggregationSelector = defaultAggregationSelector; + + return this; + } + + /** + * Constructs a new instance of the exporter based on the builder's values. + * + * @return a new exporter's instance + */ + public OtlpStdoutMetricExporter build() { + if (memoryMode == MemoryMode.REUSABLE_DATA && !wrapperJsonObject) { + throw new IllegalArgumentException( + "Reusable data mode is not supported without wrapperJsonObject"); + } + return new OtlpStdoutMetricExporter( + logger, + jsonWriter, + wrapperJsonObject, + memoryMode, + aggregationTemporalitySelector, + defaultAggregationSelector); + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporterComponentProvider.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporterComponentProvider.java new file mode 100644 index 00000000000..c600edbd59d --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporterComponentProvider.java @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.metrics; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.internal.IncubatingExporterBuilderUtil; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.metrics.export.MetricExporter; + +/** + * Declarative configuration SPI implementation for {@link OtlpStdoutMetricExporter}. + * + *
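Pulling the metric exporter and builder above together, a hedged sketch of a non-default configuration; the delta/exponential-histogram choices, the PeriodicMetricReader wiring, and the interval are illustrative assumptions, only the builder API comes from this diff.

import io.opentelemetry.exporter.logging.otlp.internal.metrics.OtlpStdoutMetricExporter;
import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector;
import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector;
import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader;
import java.time.Duration;

class OtlpStdoutMetricExporterUsageSketch {
  static SdkMeterProvider create() {
    // builder() defaults: System.out output, wrapperJsonObject=true, IMMUTABLE_DATA memory
    // mode, cumulative temporality, default aggregations.
    OtlpStdoutMetricExporter exporter =
        OtlpStdoutMetricExporter.builder()
            .setAggregationTemporalitySelector(AggregationTemporalitySelector.deltaPreferred())
            .setDefaultAggregationSelector(
                DefaultAggregationSelector.getDefault()
                    .with(InstrumentType.HISTOGRAM, Aggregation.base2ExponentialBucketHistogram()))
            .build();
    return SdkMeterProvider.builder()
        .registerMetricReader(
            PeriodicMetricReader.builder(exporter).setInterval(Duration.ofSeconds(60)).build())
        .build();
  }
}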
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutMetricExporterComponentProvider + implements ComponentProvider { + + @Override + public Class getType() { + return MetricExporter.class; + } + + @Override + public String getName() { + return "experimental-otlp/stdout"; + } + + @Override + public MetricExporter create(DeclarativeConfigProperties config) { + OtlpStdoutMetricExporterBuilder builder = OtlpStdoutMetricExporter.builder(); + IncubatingExporterBuilderUtil.configureExporterMemoryMode(config, builder::setMemoryMode); + IncubatingExporterBuilderUtil.configureOtlpAggregationTemporality( + config, builder::setAggregationTemporalitySelector); + IncubatingExporterBuilderUtil.configureOtlpHistogramDefaultAggregation( + config, builder::setDefaultAggregationSelector); + return builder.build(); + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporterProvider.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporterProvider.java new file mode 100644 index 00000000000..84f32f56c7b --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/metrics/OtlpStdoutMetricExporterProvider.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.metrics; + +import io.opentelemetry.exporter.internal.ExporterBuilderUtil; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider; +import io.opentelemetry.sdk.metrics.export.MetricExporter; + +/** + * {@link MetricExporter} SPI implementation for {@link OtlpStdoutMetricExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutMetricExporterProvider implements ConfigurableMetricExporterProvider { + @Override + public MetricExporter createExporter(ConfigProperties config) { + OtlpStdoutMetricExporterBuilder builder = OtlpStdoutMetricExporter.builder(); + ExporterBuilderUtil.configureExporterMemoryMode(config, builder::setMemoryMode); + ExporterBuilderUtil.configureOtlpAggregationTemporality( + config, builder::setAggregationTemporalitySelector); + ExporterBuilderUtil.configureOtlpHistogramDefaultAggregation( + config, builder::setDefaultAggregationSelector); + return builder.build(); + } + + @Override + public String getName() { + return "experimental-otlp/stdout"; + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingSpanExporterProvider.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/LoggingSpanExporterProvider.java similarity index 83% rename from exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingSpanExporterProvider.java rename to exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/LoggingSpanExporterProvider.java index 6ce1856a894..6394acf78aa 100644 --- a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingSpanExporterProvider.java +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/LoggingSpanExporterProvider.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.exporter.logging.otlp.internal; +package io.opentelemetry.exporter.logging.otlp.internal.traces; import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingSpanExporter; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; @@ -16,7 +16,8 @@ *
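OtlpStdoutMetricExporterProvider above is likewise selected by name through autoconfiguration. A hedged sketch, assuming otel.metrics.exporter selects the provider and that ExporterBuilderUtil reads the usual otel.exporter.otlp.metrics.temporality.preference property:

import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;
import java.util.HashMap;
import java.util.Map;

class StdoutMetricExporterAutoConfigureSketch {
  static AutoConfiguredOpenTelemetrySdk create() {
    Map<String, String> props = new HashMap<>();
    props.put("otel.metrics.exporter", "experimental-otlp/stdout");
    props.put("otel.exporter.otlp.metrics.temporality.preference", "delta"); // assumed property
    return AutoConfiguredOpenTelemetrySdk.builder()
        .addPropertiesSupplier(() -> props)
        .build();
  }
}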
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. */ -public class LoggingSpanExporterProvider implements ConfigurableSpanExporterProvider { +public final class LoggingSpanExporterProvider implements ConfigurableSpanExporterProvider { + @Override public SpanExporter createExporter(ConfigProperties config) { return OtlpJsonLoggingSpanExporter.create(); diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporter.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporter.java new file mode 100644 index 00000000000..187cdacc245 --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporter.java @@ -0,0 +1,109 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.traces; + +import io.opentelemetry.exporter.internal.otlp.traces.ResourceSpansMarshaler; +import io.opentelemetry.exporter.internal.otlp.traces.SpanReusableDataMarshaler; +import io.opentelemetry.exporter.logging.otlp.internal.writer.JsonWriter; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.util.Collection; +import java.util.StringJoiner; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * Exporter for sending OTLP spans to stdout. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutSpanExporter implements SpanExporter { + + private static final Logger LOGGER = Logger.getLogger(OtlpStdoutSpanExporter.class.getName()); + + private final AtomicBoolean isShutdown = new AtomicBoolean(); + + private final Logger logger; + private final JsonWriter jsonWriter; + private final boolean wrapperJsonObject; + private final MemoryMode memoryMode; + private final Function, CompletableResultCode> marshaler; + + OtlpStdoutSpanExporter( + Logger logger, JsonWriter jsonWriter, boolean wrapperJsonObject, MemoryMode memoryMode) { + this.logger = logger; + this.jsonWriter = jsonWriter; + this.wrapperJsonObject = wrapperJsonObject; + this.memoryMode = memoryMode; + marshaler = createMarshaler(jsonWriter, memoryMode, wrapperJsonObject); + } + + /** Returns a new {@link OtlpStdoutSpanExporterBuilder}. */ + @SuppressWarnings("SystemOut") + public static OtlpStdoutSpanExporterBuilder builder() { + return new OtlpStdoutSpanExporterBuilder(LOGGER).setOutput(System.out); + } + + private static Function, CompletableResultCode> createMarshaler( + JsonWriter jsonWriter, MemoryMode memoryMode, boolean wrapperJsonObject) { + if (wrapperJsonObject) { + SpanReusableDataMarshaler reusableDataMarshaler = + new SpanReusableDataMarshaler( + memoryMode, (marshaler, numItems) -> jsonWriter.write(marshaler)); + return reusableDataMarshaler::export; + } else { + return spans -> { + // no support for low allocation marshaler + for (ResourceSpansMarshaler marshaler : ResourceSpansMarshaler.create(spans)) { + CompletableResultCode resultCode = jsonWriter.write(marshaler); + if (!resultCode.isSuccess()) { + // already logged + return resultCode; + } + } + return CompletableResultCode.ofSuccess(); + }; + } + } + + @Override + public CompletableResultCode export(Collection spans) { + if (isShutdown.get()) { + return CompletableResultCode.ofFailure(); + } + + return marshaler.apply(spans); + } + + @Override + public CompletableResultCode flush() { + return jsonWriter.flush(); + } + + @Override + public CompletableResultCode shutdown() { + if (!isShutdown.compareAndSet(false, true)) { + logger.log(Level.INFO, "Calling shutdown() multiple times."); + } else { + jsonWriter.close(); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public String toString() { + StringJoiner joiner = new StringJoiner(", ", "OtlpStdoutSpanExporter{", "}"); + joiner.add("jsonWriter=" + jsonWriter); + joiner.add("wrapperJsonObject=" + wrapperJsonObject); + joiner.add("memoryMode=" + memoryMode); + return joiner.toString(); + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporterBuilder.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporterBuilder.java new file mode 100644 index 00000000000..341f63c6e49 --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporterBuilder.java @@ -0,0 +1,93 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.traces; + +import static java.util.Objects.requireNonNull; + +import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingSpanExporter; +import io.opentelemetry.exporter.logging.otlp.internal.writer.JsonWriter; +import 
io.opentelemetry.exporter.logging.otlp.internal.writer.LoggerJsonWriter; +import io.opentelemetry.exporter.logging.otlp.internal.writer.StreamJsonWriter; +import io.opentelemetry.sdk.common.export.MemoryMode; +import java.io.OutputStream; +import java.util.logging.Logger; + +/** + * Builder for {@link OtlpJsonLoggingSpanExporter}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutSpanExporterBuilder { + + private static final String TYPE = "spans"; + + private final Logger logger; + private JsonWriter jsonWriter; + private boolean wrapperJsonObject = true; + private MemoryMode memoryMode = MemoryMode.IMMUTABLE_DATA; + + public OtlpStdoutSpanExporterBuilder(Logger logger) { + this.logger = logger; + this.jsonWriter = new LoggerJsonWriter(logger, TYPE); + } + + /** + * Sets the exporter to use the specified JSON object wrapper. + * + * @param wrapperJsonObject whether to wrap the JSON object in an outer JSON "resourceSpans" + * object. + */ + public OtlpStdoutSpanExporterBuilder setWrapperJsonObject(boolean wrapperJsonObject) { + this.wrapperJsonObject = wrapperJsonObject; + return this; + } + + /** + * Set the {@link MemoryMode}. If unset, defaults to {@link MemoryMode#IMMUTABLE_DATA}. + * + *

When memory mode is {@link MemoryMode#REUSABLE_DATA}, serialization is optimized to reduce + * memory allocation. + */ + public OtlpStdoutSpanExporterBuilder setMemoryMode(MemoryMode memoryMode) { + this.memoryMode = memoryMode; + return this; + } + + /** + * Sets the exporter to use the specified output stream. + * + *

The output stream will be closed when {@link OtlpStdoutSpanExporter#shutdown()} is called + * unless it's {@link System#out} or {@link System#err}. + * + * @param outputStream the output stream to use. + */ + public OtlpStdoutSpanExporterBuilder setOutput(OutputStream outputStream) { + requireNonNull(outputStream, "outputStream"); + this.jsonWriter = new StreamJsonWriter(outputStream, TYPE); + return this; + } + + /** Sets the exporter to use the specified logger. */ + public OtlpStdoutSpanExporterBuilder setOutput(Logger logger) { + requireNonNull(logger, "logger"); + this.jsonWriter = new LoggerJsonWriter(logger, TYPE); + return this; + } + + /** + * Constructs a new instance of the exporter based on the builder's values. + * + * @return a new exporter's instance + */ + public OtlpStdoutSpanExporter build() { + if (memoryMode == MemoryMode.REUSABLE_DATA && !wrapperJsonObject) { + throw new IllegalArgumentException( + "Reusable data mode is not supported without wrapperJsonObject"); + } + return new OtlpStdoutSpanExporter(logger, jsonWriter, wrapperJsonObject, memoryMode); + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporterComponentProvider.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporterComponentProvider.java new file mode 100644 index 00000000000..8a821b3977b --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporterComponentProvider.java @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.traces; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.internal.IncubatingExporterBuilderUtil; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.trace.export.SpanExporter; + +/** + * Declarative configuration SPI implementation for {@link OtlpStdoutSpanExporter}. + * + *
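A short usage sketch (illustrative only, not part of this diff) for the builder above, wiring the stdout span exporter into an SDK tracer provider. `SimpleSpanProcessor` is chosen purely for brevity; the class name `OtlpStdoutSpanExporterSketch` is invented for the example.

```java
// Illustrative sketch only (not part of this change).
import io.opentelemetry.exporter.logging.otlp.internal.traces.OtlpStdoutSpanExporter;
import io.opentelemetry.sdk.common.export.MemoryMode;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor;

class OtlpStdoutSpanExporterSketch {
  static SdkTracerProvider create() {
    OtlpStdoutSpanExporter exporter =
        OtlpStdoutSpanExporter.builder()
            // REUSABLE_DATA requires wrapperJsonObject=true (the default); build() throws otherwise
            .setMemoryMode(MemoryMode.REUSABLE_DATA)
            .setWrapperJsonObject(true)
            .build();
    return SdkTracerProvider.builder()
        .addSpanProcessor(SimpleSpanProcessor.create(exporter))
        .build();
  }
}
```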

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutSpanExporterComponentProvider + implements ComponentProvider { + + @Override + public Class getType() { + return SpanExporter.class; + } + + @Override + public String getName() { + return "experimental-otlp/stdout"; + } + + @Override + public SpanExporter create(DeclarativeConfigProperties config) { + OtlpStdoutSpanExporterBuilder builder = OtlpStdoutSpanExporter.builder(); + IncubatingExporterBuilderUtil.configureExporterMemoryMode(config, builder::setMemoryMode); + return builder.build(); + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporterProvider.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporterProvider.java new file mode 100644 index 00000000000..84514492f24 --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/traces/OtlpStdoutSpanExporterProvider.java @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.traces; + +import io.opentelemetry.exporter.internal.ExporterBuilderUtil; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider; +import io.opentelemetry.sdk.trace.export.SpanExporter; + +/** + * {@link SpanExporter} SPI implementation for {@link OtlpStdoutSpanExporter}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtlpStdoutSpanExporterProvider implements ConfigurableSpanExporterProvider { + @Override + public SpanExporter createExporter(ConfigProperties config) { + OtlpStdoutSpanExporterBuilder builder = OtlpStdoutSpanExporter.builder(); + ExporterBuilderUtil.configureExporterMemoryMode(config, builder::setMemoryMode); + return builder.build(); + } + + @Override + public String getName() { + return "experimental-otlp/stdout"; + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/JsonUtil.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/JsonUtil.java similarity index 58% rename from exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/JsonUtil.java rename to exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/JsonUtil.java index b4b8cbc577c..0b74ed8a478 100644 --- a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/JsonUtil.java +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/JsonUtil.java @@ -3,18 +3,22 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.exporter.logging.otlp; +package io.opentelemetry.exporter.logging.otlp.internal.writer; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.io.SegmentedStringWriter; import java.io.IOException; -final class JsonUtil { +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. + */ +public final class JsonUtil { - static final JsonFactory JSON_FACTORY = new JsonFactory(); + public static final JsonFactory JSON_FACTORY = new JsonFactory(); - static JsonGenerator create(SegmentedStringWriter stringWriter) { + public static JsonGenerator create(SegmentedStringWriter stringWriter) { try { return JSON_FACTORY.createGenerator(stringWriter); } catch (IOException e) { diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/JsonWriter.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/JsonWriter.java new file mode 100644 index 00000000000..bfee16cba65 --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/JsonWriter.java @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.writer; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.sdk.common.CompletableResultCode; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. 
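Because the providers above register under the name `experimental-otlp/stdout`, the exporters can also be selected through SDK autoconfiguration. A hedged sketch, assuming the `opentelemetry-sdk-extension-autoconfigure` artifact is on the classpath; the exporter name and the `otel.java.exporter.memory_mode` property come from this change, the class name `OtlpStdoutAutoConfigureSketch` is invented for the example.

```java
// Illustrative sketch only (not part of this change): selecting the stdout exporters via
// autoconfiguration. Assumes opentelemetry-sdk-extension-autoconfigure is present.
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;

class OtlpStdoutAutoConfigureSketch {
  static OpenTelemetrySdk initialize() {
    System.setProperty("otel.traces.exporter", "experimental-otlp/stdout");
    System.setProperty("otel.metrics.exporter", "experimental-otlp/stdout");
    System.setProperty("otel.logs.exporter", "experimental-otlp/stdout");
    // optional: low-allocation serialization path (see the providerConfig test later in this diff)
    System.setProperty("otel.java.exporter.memory_mode", "reusable_data");
    return AutoConfiguredOpenTelemetrySdk.initialize().getOpenTelemetrySdk();
  }
}
```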
+ */ +public interface JsonWriter { + CompletableResultCode write(Marshaler exportRequest); + + CompletableResultCode flush(); + + CompletableResultCode close(); +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/LoggerJsonWriter.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/LoggerJsonWriter.java new file mode 100644 index 00000000000..a95c5e0d2c1 --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/LoggerJsonWriter.java @@ -0,0 +1,65 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.writer; + +import static io.opentelemetry.exporter.logging.otlp.internal.writer.JsonUtil.JSON_FACTORY; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.io.SegmentedStringWriter; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.sdk.common.CompletableResultCode; +import java.io.IOException; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. + */ +public class LoggerJsonWriter implements JsonWriter { + + private final Logger logger; + private final String type; + + public LoggerJsonWriter(Logger logger, String type) { + this.logger = logger; + this.type = type; + } + + @Override + public CompletableResultCode write(Marshaler exportRequest) { + SegmentedStringWriter sw = new SegmentedStringWriter(JSON_FACTORY._getBufferRecycler()); + try (JsonGenerator gen = JsonUtil.create(sw)) { + exportRequest.writeJsonToGenerator(gen); + } catch (IOException e) { + logger.log(Level.WARNING, "Unable to write OTLP JSON " + type, e); + return CompletableResultCode.ofFailure(); + } + + try { + logger.log(Level.INFO, sw.getAndClear()); + return CompletableResultCode.ofSuccess(); + } catch (IOException e) { + logger.log(Level.WARNING, "Unable to write OTLP JSON " + type, e); + return CompletableResultCode.ofFailure(); + } + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode close() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public String toString() { + return "LoggerJsonWriter"; + } +} diff --git a/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/StreamJsonWriter.java b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/StreamJsonWriter.java new file mode 100644 index 00000000000..0f47ce20e09 --- /dev/null +++ b/exporters/logging-otlp/src/main/java/io/opentelemetry/exporter/logging/otlp/internal/writer/StreamJsonWriter.java @@ -0,0 +1,94 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.writer; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.internal.ThrottlingLogger; +import java.io.IOException; +import java.io.OutputStream; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * This class is internal and is hence not for public use. 
Its APIs are unstable and can change at + * any time. + */ +public class StreamJsonWriter implements JsonWriter { + + public static final JsonFactory JSON_FACTORY = new JsonFactory(); + + private static final Logger internalLogger = Logger.getLogger(StreamJsonWriter.class.getName()); + + private final ThrottlingLogger logger = new ThrottlingLogger(internalLogger); + + private final String type; + private final OutputStream outputStream; + + public StreamJsonWriter(OutputStream originalStream, String type) { + this.outputStream = originalStream; + this.type = type; + } + + @Override + public CompletableResultCode write(Marshaler exportRequest) { + try { + exportRequest.writeJsonWithNewline( + JSON_FACTORY + .createGenerator(outputStream) + .disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET)); + return CompletableResultCode.ofSuccess(); + } catch (IOException e) { + logger.log(Level.WARNING, "Unable to write OTLP JSON " + type, e); + return CompletableResultCode.ofFailure(); + } + } + + @Override + public CompletableResultCode flush() { + try { + outputStream.flush(); + return CompletableResultCode.ofSuccess(); + } catch (IOException e) { + logger.log(Level.WARNING, "Failed to flush items", e); + return CompletableResultCode.ofFailure(); + } + } + + @SuppressWarnings("SystemOut") + @Override + public CompletableResultCode close() { + if (outputStream == System.out || outputStream == System.err) { + // closing System.out or System.err is not allowed - it breaks the output stream + return CompletableResultCode.ofSuccess(); + } + try { + outputStream.close(); + return CompletableResultCode.ofSuccess(); + } catch (IOException e) { + logger.log(Level.WARNING, "Failed to close stream", e); + return CompletableResultCode.ofFailure(); + } + } + + @Override + public String toString() { + return "StreamJsonWriter{" + "outputStream=" + getName(outputStream) + '}'; + } + + @SuppressWarnings("SystemOut") + private static String getName(OutputStream outputStream) { + if (outputStream == System.out) { + return "stdout"; + } + if (outputStream == System.err) { + return "stderr"; + } + return outputStream.toString(); + } +} diff --git a/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider new file mode 100644 index 00000000000..b90ec18cc20 --- /dev/null +++ b/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider @@ -0,0 +1,3 @@ +io.opentelemetry.exporter.logging.otlp.internal.logs.OtlpStdoutLogRecordExporterComponentProvider +io.opentelemetry.exporter.logging.otlp.internal.metrics.OtlpStdoutMetricExporterComponentProvider +io.opentelemetry.exporter.logging.otlp.internal.traces.OtlpStdoutSpanExporterComponentProvider diff --git a/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider b/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider index 9119f54ef00..20763b2e4e5 100644 --- a/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider +++ b/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider @@ -1 
+1,2 @@ -io.opentelemetry.exporter.logging.otlp.internal.LoggingLogRecordExporterProvider +io.opentelemetry.exporter.logging.otlp.internal.logs.LoggingLogRecordExporterProvider +io.opentelemetry.exporter.logging.otlp.internal.logs.OtlpStdoutLogRecordExporterProvider diff --git a/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider b/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider index 2b532ca9a38..b1641bbe303 100644 --- a/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider +++ b/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider @@ -1 +1,3 @@ -io.opentelemetry.exporter.logging.otlp.internal.LoggingMetricExporterProvider +io.opentelemetry.exporter.logging.otlp.internal.metrics.LoggingMetricExporterProvider +io.opentelemetry.exporter.logging.otlp.internal.metrics.OtlpStdoutMetricExporterProvider + diff --git a/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider b/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider index fe444f4acf3..d1a8bfa347a 100644 --- a/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider +++ b/exporters/logging-otlp/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider @@ -1 +1,2 @@ -io.opentelemetry.exporter.logging.otlp.internal.LoggingSpanExporterProvider +io.opentelemetry.exporter.logging.otlp.internal.traces.LoggingSpanExporterProvider +io.opentelemetry.exporter.logging.otlp.internal.traces.OtlpStdoutSpanExporterProvider diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/AbstractOtlpStdoutExporterTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/AbstractOtlpStdoutExporterTest.java new file mode 100644 index 00000000000..7605cc851dc --- /dev/null +++ b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/AbstractOtlpStdoutExporterTest.java @@ -0,0 +1,379 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonMap; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Streams; +import io.github.netmikey.logunit.api.LogCapturer; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.common.export.MemoryMode; +import java.io.BufferedOutputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; 
+import java.io.OutputStream; +import java.io.PrintStream; +import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ServiceLoader; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import java.util.stream.Stream; +import javax.annotation.Nullable; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.skyscreamer.jsonassert.JSONAssert; +import org.slf4j.event.LoggingEvent; + +abstract class AbstractOtlpStdoutExporterTest { + + private static PrintStream systemOut; + + private static final String TYPE = "experimental-otlp/stdout"; + + private static final ByteArrayOutputStream SYSTEM_OUT_STREAM = new ByteArrayOutputStream(); + private static final PrintStream SYSTEM_OUT_PRINT_STREAM = new PrintStream(SYSTEM_OUT_STREAM); + + @RegisterExtension LogCapturer logs; + private int skipLogs; + private final String defaultConfigString; + private final TestDataExporter testDataExporter; + protected final Class exporterClass; + private final Class providerClass; + private final Class componentProviderType; + + @TempDir Path tempDir; + + public AbstractOtlpStdoutExporterTest( + TestDataExporter testDataExporter, + Class exporterClass, + Class providerClass, + Class componentProviderType, + String defaultConfigString) { + this.testDataExporter = testDataExporter; + this.exporterClass = exporterClass; + this.providerClass = providerClass; + logs = LogCapturer.create().captureForType(exporterClass); + this.defaultConfigString = defaultConfigString; + this.componentProviderType = componentProviderType; + } + + protected abstract T createExporter( + @Nullable OutputStream outputStream, MemoryMode memoryMode, boolean wrapperJsonObject); + + protected abstract T createDefaultExporter(); + + private String output(@Nullable OutputStream outputStream, @Nullable Path file) { + if (outputStream == null) { + return logs.getEvents().stream() + .skip(skipLogs) + .map(LoggingEvent::getMessage) + .reduce("", (a, b) -> a + b + "\n") + .trim(); + } + + if (file != null) { + try { + return new String(Files.readAllBytes(file), StandardCharsets.UTF_8).trim(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + try { + return SYSTEM_OUT_STREAM.toString(StandardCharsets.UTF_8.name()).trim(); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); + } + } + + @BeforeAll + @SuppressWarnings("SystemOut") + static void setUpStatic() { + systemOut = System.out; + System.setOut(SYSTEM_OUT_PRINT_STREAM); + } + + @AfterAll + @SuppressWarnings("SystemOut") + static void tearDownStatic() { + System.setOut(systemOut); + } + + @BeforeEach + void setUp() { + SYSTEM_OUT_STREAM.reset(); + } + + enum OutputType { + LOGGER, + SYSTEM_OUT, + FILE, + FILE_AND_BUFFERED_WRITER + } + + public static class TestCase { + private final MemoryMode memoryMode; + private final boolean wrapperJsonObject; + private final OutputType outputType; + + public TestCase(OutputType outputType, MemoryMode memoryMode, boolean wrapperJsonObject) { + this.outputType = outputType; + this.memoryMode = memoryMode; + 
this.wrapperJsonObject = wrapperJsonObject; + } + + public OutputType getOutputType() { + return outputType; + } + + public boolean isWrapperJsonObject() { + return wrapperJsonObject; + } + + public MemoryMode getMemoryMode() { + return memoryMode; + } + } + + static Stream exportTestCases() { + return ImmutableList.of( + testCase(OutputType.SYSTEM_OUT, MemoryMode.IMMUTABLE_DATA, /* wrapperJsonObject= */ true), + testCase(OutputType.SYSTEM_OUT, MemoryMode.IMMUTABLE_DATA, /* wrapperJsonObject= */ false), + testCase(OutputType.FILE, MemoryMode.IMMUTABLE_DATA, /* wrapperJsonObject= */ true), + testCase(OutputType.FILE, MemoryMode.IMMUTABLE_DATA, /* wrapperJsonObject= */ false), + testCase( + OutputType.FILE_AND_BUFFERED_WRITER, + MemoryMode.IMMUTABLE_DATA, + /* wrapperJsonObject= */ true), + testCase( + OutputType.FILE_AND_BUFFERED_WRITER, + MemoryMode.IMMUTABLE_DATA, + /* wrapperJsonObject= */ false), + testCase(OutputType.LOGGER, MemoryMode.IMMUTABLE_DATA, /* wrapperJsonObject= */ true), + testCase(OutputType.LOGGER, MemoryMode.IMMUTABLE_DATA, /* wrapperJsonObject= */ false), + testCase(OutputType.SYSTEM_OUT, MemoryMode.REUSABLE_DATA, /* wrapperJsonObject= */ true), + testCase(OutputType.SYSTEM_OUT, MemoryMode.REUSABLE_DATA, /* wrapperJsonObject= */ false), + testCase(OutputType.FILE, MemoryMode.REUSABLE_DATA, /* wrapperJsonObject= */ true), + testCase(OutputType.FILE, MemoryMode.REUSABLE_DATA, /* wrapperJsonObject= */ false), + testCase( + OutputType.FILE_AND_BUFFERED_WRITER, + MemoryMode.REUSABLE_DATA, + /* wrapperJsonObject= */ true), + testCase( + OutputType.FILE_AND_BUFFERED_WRITER, + MemoryMode.REUSABLE_DATA, + /* wrapperJsonObject= */ false), + testCase(OutputType.LOGGER, MemoryMode.REUSABLE_DATA, /* wrapperJsonObject= */ true), + testCase(OutputType.LOGGER, MemoryMode.REUSABLE_DATA, /* wrapperJsonObject= */ false)) + .stream(); + } + + private static Arguments testCase( + OutputType type, MemoryMode memoryMode, boolean wrapperJsonObject) { + return Arguments.of( + "output=" + + type + + ", wrapperJsonObject=" + + wrapperJsonObject + + ", memoryMode=" + + memoryMode, + new TestCase(type, memoryMode, wrapperJsonObject)); + } + + @SuppressWarnings("SystemOut") + @ParameterizedTest(name = "{0}") + @MethodSource("exportTestCases") + void exportWithProgrammaticConfig(String name, TestCase testCase) throws Exception { + OutputStream outputStream; + Path file = null; + switch (testCase.getOutputType()) { + case LOGGER: + outputStream = null; + break; + case SYSTEM_OUT: + outputStream = System.out; + break; + case FILE: + file = tempDir.resolve("test.log"); + outputStream = Files.newOutputStream(file); + break; + case FILE_AND_BUFFERED_WRITER: + file = tempDir.resolve("test.log"); + outputStream = new BufferedOutputStream(Files.newOutputStream(file)); + break; + default: + throw new IllegalStateException("Unexpected value: " + testCase.getOutputType()); + } + + Supplier exporter = + () -> + createExporter(outputStream, testCase.getMemoryMode(), testCase.isWrapperJsonObject()); + + if (testCase.getMemoryMode() == MemoryMode.REUSABLE_DATA && !testCase.isWrapperJsonObject()) { + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(exporter::get) + .withMessage("Reusable data mode is not supported without wrapperJsonObject"); + return; + } + + testDataExporter.export(exporter.get()); + + String output = output(outputStream, file); + String expectedJson = testDataExporter.getExpectedJson(testCase.isWrapperJsonObject()); + JSONAssert.assertEquals("Got \n" + output, expectedJson, 
output, false); + + if (testCase.isWrapperJsonObject()) { + assertThat(output).doesNotContain("\n"); + } + + if (file == null) { + // no need to test again for file - and it's not working with files + assertDoubleOutput(exporter, expectedJson, outputStream); + } + } + + private void assertDoubleOutput( + Supplier exporter, String expectedJson, @Nullable OutputStream outputStream) + throws Exception { + SYSTEM_OUT_STREAM.reset(); + skipLogs = logs.getEvents().size(); + testDataExporter.export(exporter.get()); + testDataExporter.export(exporter.get()); + + String[] lines = output(outputStream, null).split("\n"); + assertThat(lines).hasSize(2); + for (String line : lines) { + JSONAssert.assertEquals("Got \n" + line, expectedJson, line, false); + } + } + + @Test + void testShutdown() { + T exporter = createDefaultExporter(); + assertThat(testDataExporter.shutdown(exporter).isSuccess()).isTrue(); + assertThat(testDataExporter.export(exporter).join(10, TimeUnit.SECONDS).isSuccess()).isFalse(); + assertThat(testDataExporter.flush(exporter).join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); + assertThat(output(null, null)).isEmpty(); + assertThat(testDataExporter.shutdown(exporter).isSuccess()).isTrue(); + logs.assertContains("Calling shutdown() multiple times."); + } + + @Test + void defaultToString() { + assertThat(createDefaultExporter()).hasToString(defaultConfigString); + + assertThat(exporterFromProvider(DefaultConfigProperties.createFromMap(emptyMap()))) + .hasToString(defaultConfigString); + } + + @Test + void providerConfig() { + assertThat( + exporterFromProvider( + DefaultConfigProperties.createFromMap( + singletonMap("otel.java.exporter.memory_mode", "immutable_data")))) + .extracting("memoryMode") + .isEqualTo(MemoryMode.IMMUTABLE_DATA); + assertThat( + exporterFromProvider( + DefaultConfigProperties.createFromMap( + singletonMap("otel.java.exporter.memory_mode", "reusable_data")))) + .extracting("memoryMode") + .isEqualTo(MemoryMode.REUSABLE_DATA); + } + + @Test + void componentProviderConfig() { + DeclarativeConfigProperties properties = mock(DeclarativeConfigProperties.class); + T exporter = exporterFromComponentProvider(properties); + + assertThat(exporter).extracting("wrapperJsonObject").isEqualTo(true); + assertThat(exporter).extracting("memoryMode").isEqualTo(MemoryMode.IMMUTABLE_DATA); + assertThat(exporter) + .extracting("jsonWriter") + .extracting(Object::toString) + .isEqualTo("StreamJsonWriter{outputStream=stdout}"); + + when(properties.getString("memory_mode")).thenReturn("IMMUTABLE_DATA"); + assertThat(exporterFromComponentProvider(properties)) + .extracting("memoryMode") + .isEqualTo(MemoryMode.IMMUTABLE_DATA); + + when(properties.getString("memory_mode")).thenReturn("REUSABLE_DATA"); + assertThat(exporterFromComponentProvider(properties)) + .extracting("memoryMode") + .isEqualTo(MemoryMode.REUSABLE_DATA); + } + + @SuppressWarnings("unchecked") + protected T exporterFromComponentProvider(DeclarativeConfigProperties properties) { + return (T) + ((ComponentProvider) + loadSpi(ComponentProvider.class) + .filter( + p -> { + ComponentProvider c = (ComponentProvider) p; + return "experimental-otlp/stdout".equals(c.getName()) + && c.getType().equals(componentProviderType); + }) + .findFirst() + .orElseThrow(() -> new IllegalStateException("No provider found"))) + .create(properties); + } + + @SuppressWarnings("unchecked") + protected T exporterFromProvider(ConfigProperties config) { + Object provider = loadProvider(); + + try { + return (T) + provider + .getClass() + 
.getDeclaredMethod("createExporter", ConfigProperties.class) + .invoke(provider, config); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private Object loadProvider() { + return loadSpi(providerClass) + .filter( + p -> { + try { + return AbstractOtlpStdoutExporterTest.TYPE.equals( + p.getClass().getDeclaredMethod("getName").invoke(p)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }) + .findFirst() + .orElseThrow(() -> new IllegalStateException("No provider found")); + } + + protected static Stream loadSpi(Class type) { + return Streams.stream(ServiceLoader.load(type, type.getClassLoader()).iterator()); + } +} diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingLogRecordExporterTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingLogRecordExporterTest.java index 27ce4c3ea99..75677620dbb 100644 --- a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingLogRecordExporterTest.java +++ b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingLogRecordExporterTest.java @@ -5,25 +5,11 @@ package io.opentelemetry.exporter.logging.otlp; -import static io.opentelemetry.api.common.AttributeKey.booleanKey; -import static io.opentelemetry.api.common.AttributeKey.longKey; -import static io.opentelemetry.api.common.AttributeKey.stringKey; import static org.assertj.core.api.Assertions.assertThat; import io.github.netmikey.logunit.api.LogCapturer; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.logs.Severity; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.TraceFlags; -import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.logs.data.LogRecordData; import io.opentelemetry.sdk.logs.export.LogRecordExporter; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.logs.TestLogRecordData; -import java.util.Arrays; -import java.util.Collections; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -34,49 +20,7 @@ @SuppressLogger(OtlpJsonLoggingLogRecordExporter.class) class OtlpJsonLoggingLogRecordExporterTest { - private static final Resource RESOURCE = - Resource.create(Attributes.builder().put("key", "value").build()); - - private static final LogRecordData LOG1 = - TestLogRecordData.builder() - .setResource(RESOURCE) - .setInstrumentationScopeInfo( - InstrumentationScopeInfo.builder("instrumentation") - .setVersion("1") - .setAttributes(Attributes.builder().put("key", "value").build()) - .build()) - .setBody("body1") - .setSeverity(Severity.INFO) - .setSeverityText("INFO") - .setTimestamp(100L, TimeUnit.NANOSECONDS) - .setObservedTimestamp(200L, TimeUnit.NANOSECONDS) - .setAttributes(Attributes.of(stringKey("animal"), "cat", longKey("lives"), 9L)) - .setSpanContext( - SpanContext.create( - "12345678876543211234567887654322", - "8765432112345876", - TraceFlags.getDefault(), - TraceState.getDefault())) - .build(); - - private static final LogRecordData LOG2 = - TestLogRecordData.builder() - .setResource(RESOURCE) - .setInstrumentationScopeInfo( - InstrumentationScopeInfo.builder("instrumentation2").setVersion("2").build()) - .setBody("body2") - .setSeverity(Severity.INFO) - 
.setSeverityText("INFO") - .setTimestamp(100L, TimeUnit.NANOSECONDS) - .setObservedTimestamp(200L, TimeUnit.NANOSECONDS) - .setAttributes(Attributes.of(booleanKey("important"), true)) - .setSpanContext( - SpanContext.create( - "12345678876543211234567887654322", - "8765432112345875", - TraceFlags.getDefault(), - TraceState.getDefault())) - .build(); + private final TestDataExporter testDataExporter = TestDataExporter.forLogs(); @RegisterExtension LogCapturer logs = LogCapturer.create().captureForType(OtlpJsonLoggingLogRecordExporter.class); @@ -90,90 +34,21 @@ void setUp() { @Test void log() throws Exception { - exporter.export(Arrays.asList(LOG1, LOG2)); + testDataExporter.export(exporter); assertThat(logs.getEvents()) .hasSize(1) .allSatisfy(log -> assertThat(log.getLevel()).isEqualTo(Level.INFO)); String message = logs.getEvents().get(0).getMessage(); - JSONAssert.assertEquals( - "{" - + " \"resource\": {" - + " \"attributes\": [{" - + " \"key\":\"key\"," - + " \"value\": {" - + " \"stringValue\":\"value\"" - + " }" - + " }]" - + " }," - + " \"scopeLogs\": [{" - + " \"scope\":{" - + " \"name\":\"instrumentation2\"," - + " \"version\":\"2\"" - + " }," - + " \"logRecords\": [{" - + " \"timeUnixNano\":\"100\"," - + " \"observedTimeUnixNano\":\"200\"," - + " \"severityNumber\":9," - + " \"severityText\":\"INFO\"," - + " \"body\": {" - + " \"stringValue\":\"body2\"" - + " }," - + " \"attributes\": [{" - + " \"key\":\"important\"," - + " \"value\": {" - + " \"boolValue\":true" - + " }" - + " }]," - + " \"traceId\":\"12345678876543211234567887654322\"," - + " \"spanId\":\"8765432112345875\"" - + " }]" - + " }, {" - + " \"scope\": {" - + " \"name\":\"instrumentation\"," - + " \"version\":\"1\"," - + " \"attributes\": [{" - + " \"key\":\"key\"," - + " \"value\": {" - + " \"stringValue\":\"value\"" - + " }" - + " }]" - + " }," - + " \"logRecords\": [{" - + " \"timeUnixNano\":\"100\"," - + " \"observedTimeUnixNano\":\"200\"," - + " \"severityNumber\":9," - + " \"severityText\":\"INFO\"," - + " \"body\": {" - + " \"stringValue\":\"body1\"" - + " }," - + " \"attributes\": [{" - + " \"key\":\"animal\"," - + " \"value\": {" - + " \"stringValue\":\"cat\"" - + " }" - + " }, {" - + " \"key\":\"lives\"," - + " \"value\":{" - + " \"intValue\":\"9\"" - + " }" - + " }]," - + " \"traceId\":\"12345678876543211234567887654322\"," - + " \"spanId\":\"8765432112345876\"" - + " }]" - + " }]" - + "}", - message, - /* strict= */ false); + String expectedJson = testDataExporter.getExpectedJson(false); + JSONAssert.assertEquals("Got \n" + message, expectedJson, message, /* strict= */ false); assertThat(message).doesNotContain("\n"); } @Test void shutdown() { assertThat(exporter.shutdown().isSuccess()).isTrue(); - assertThat( - exporter.export(Collections.singletonList(LOG1)).join(10, TimeUnit.SECONDS).isSuccess()) - .isFalse(); + assertThat(testDataExporter.export(exporter).join(10, TimeUnit.SECONDS).isSuccess()).isFalse(); assertThat(logs.getEvents()).isEmpty(); assertThat(exporter.shutdown().isSuccess()).isTrue(); logs.assertContains("Calling shutdown() multiple times."); diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingMetricExporterTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingMetricExporterTest.java index 287ec7b74a1..ccbfdb26dce 100644 --- a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingMetricExporterTest.java +++ 
b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingMetricExporterTest.java @@ -5,22 +5,13 @@ package io.opentelemetry.exporter.logging.otlp; -import static io.opentelemetry.api.common.AttributeKey.stringKey; import static org.assertj.core.api.Assertions.assertThat; import io.github.netmikey.logunit.api.LogCapturer; -import io.opentelemetry.api.common.Attributes; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; -import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.MetricExporter; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; -import io.opentelemetry.sdk.resources.Resource; -import java.util.Arrays; import java.util.Collections; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.BeforeEach; @@ -32,39 +23,7 @@ @SuppressLogger(OtlpJsonLoggingMetricExporter.class) class OtlpJsonLoggingMetricExporterTest { - private static final Resource RESOURCE = - Resource.create(Attributes.builder().put("key", "value").build()); - - private static final MetricData METRIC1 = - ImmutableMetricData.createDoubleSum( - RESOURCE, - InstrumentationScopeInfo.builder("instrumentation") - .setVersion("1") - .setAttributes(Attributes.builder().put("key", "value").build()) - .build(), - "metric1", - "metric1 description", - "m", - ImmutableSumData.create( - true, - AggregationTemporality.CUMULATIVE, - Arrays.asList( - ImmutableDoublePointData.create( - 1, 2, Attributes.of(stringKey("cat"), "meow"), 4)))); - - private static final MetricData METRIC2 = - ImmutableMetricData.createDoubleSum( - RESOURCE, - InstrumentationScopeInfo.builder("instrumentation2").setVersion("2").build(), - "metric2", - "metric2 description", - "s", - ImmutableSumData.create( - true, - AggregationTemporality.CUMULATIVE, - Arrays.asList( - ImmutableDoublePointData.create( - 1, 2, Attributes.of(stringKey("cat"), "meow"), 4)))); + private final TestDataExporter testDataExporter = TestDataExporter.forMetrics(); @RegisterExtension LogCapturer logs = LogCapturer.create().captureForType(OtlpJsonLoggingMetricExporter.class); @@ -90,78 +49,15 @@ void getAggregationTemporality() { @Test void log() throws Exception { - exporter.export(Arrays.asList(METRIC1, METRIC2)); + testDataExporter.export(exporter); assertThat(logs.getEvents()) .hasSize(1) .allSatisfy(log -> assertThat(log.getLevel()).isEqualTo(Level.INFO)); - JSONAssert.assertEquals( - "{" - + " \"resource\": {" - + " \"attributes\": [{" - + " \"key\": \"key\"," - + " \"value\": {" - + " \"stringValue\": \"value\"" - + " }" - + " }]" - + " }," - + " \"scopeMetrics\": [{" - + " \"scope\": {" - + " \"name\": \"instrumentation2\"," - + " \"version\": \"2\"" - + " }," - + " \"metrics\": [{" - + " \"name\": \"metric2\"," - + " \"description\": \"metric2 description\"," - + " \"unit\": \"s\"," - + " \"sum\": {" - + " \"dataPoints\": [{" - + " \"attributes\": [{" - + " \"key\": \"cat\"," - + " \"value\": {\"stringValue\": \"meow\"}" - + " }]," - + " \"startTimeUnixNano\": \"1\"," - + " \"timeUnixNano\": \"2\"," - + " \"asDouble\": 4.0" - + " }]," - + " \"aggregationTemporality\": 2," - + " \"isMonotonic\": true" - + " }" - + " }]" - + " }, {" - + " 
\"scope\": {" - + " \"name\": \"instrumentation\"," - + " \"version\": \"1\"," - + " \"attributes\":[{" - + " \"key\":\"key\"," - + " \"value\":{" - + " \"stringValue\":\"value\"" - + " }" - + " }]" - + " }," - + " \"metrics\": [{" - + " \"name\": \"metric1\"," - + " \"description\": \"metric1 description\"," - + " \"unit\": \"m\"," - + " \"sum\": {" - + " \"dataPoints\": [{" - + " \"attributes\": [{" - + " \"key\": \"cat\"," - + " \"value\": {\"stringValue\": \"meow\"}" - + " }]," - + " \"startTimeUnixNano\": \"1\"," - + " \"timeUnixNano\": \"2\"," - + " \"asDouble\": 4.0" - + " }]," - + " \"aggregationTemporality\": 2," - + " \"isMonotonic\": true" - + " }" - + " }]" - + " }]" - + "}", - logs.getEvents().get(0).getMessage(), - /* strict= */ false); - assertThat(logs.getEvents().get(0).getMessage()).doesNotContain("\n"); + String message = logs.getEvents().get(0).getMessage(); + String expectedJson = testDataExporter.getExpectedJson(false); + JSONAssert.assertEquals("Got \n" + message, expectedJson, message, /* strict= */ false); + assertThat(message).doesNotContain("\n"); } @Test @@ -174,7 +70,7 @@ void shutdown() { assertThat(exporter.shutdown().isSuccess()).isTrue(); assertThat( exporter - .export(Collections.singletonList(METRIC1)) + .export(Collections.singletonList(TestDataExporter.METRIC1)) .join(10, TimeUnit.SECONDS) .isSuccess()) .isFalse(); diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingSpanExporterTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingSpanExporterTest.java index 309cf7afaf0..ac3129f4a1c 100644 --- a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingSpanExporterTest.java +++ b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpJsonLoggingSpanExporterTest.java @@ -5,26 +5,11 @@ package io.opentelemetry.exporter.logging.otlp; -import static io.opentelemetry.api.common.AttributeKey.booleanKey; -import static io.opentelemetry.api.common.AttributeKey.longKey; -import static io.opentelemetry.api.common.AttributeKey.stringKey; import static org.assertj.core.api.Assertions.assertThat; import io.github.netmikey.logunit.api.LogCapturer; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.SpanKind; -import io.opentelemetry.api.trace.TraceFlags; -import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.trace.TestSpanData; -import io.opentelemetry.sdk.trace.data.EventData; -import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.sdk.trace.data.StatusData; import io.opentelemetry.sdk.trace.export.SpanExporter; -import java.util.Arrays; import java.util.Collections; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.BeforeEach; @@ -36,59 +21,7 @@ @SuppressLogger(OtlpJsonLoggingSpanExporter.class) class OtlpJsonLoggingSpanExporterTest { - private static final Resource RESOURCE = - Resource.create(Attributes.builder().put("key", "value").build()); - - private static final SpanData SPAN1 = - TestSpanData.builder() - .setHasEnded(true) - .setSpanContext( - SpanContext.create( - "12345678876543211234567887654321", - "8765432112345678", - TraceFlags.getSampled(), - TraceState.getDefault())) - 
.setStartEpochNanos(100) - .setEndEpochNanos(100 + 1000) - .setStatus(StatusData.ok()) - .setName("testSpan1") - .setKind(SpanKind.INTERNAL) - .setAttributes(Attributes.of(stringKey("animal"), "cat", longKey("lives"), 9L)) - .setEvents( - Collections.singletonList( - EventData.create( - 100 + 500, - "somethingHappenedHere", - Attributes.of(booleanKey("important"), true)))) - .setTotalAttributeCount(2) - .setTotalRecordedEvents(1) - .setTotalRecordedLinks(0) - .setInstrumentationScopeInfo( - InstrumentationScopeInfo.builder("instrumentation") - .setVersion("1") - .setAttributes(Attributes.builder().put("key", "value").build()) - .build()) - .setResource(RESOURCE) - .build(); - - private static final SpanData SPAN2 = - TestSpanData.builder() - .setHasEnded(false) - .setSpanContext( - SpanContext.create( - "12340000000043211234000000004321", - "8765000000005678", - TraceFlags.getSampled(), - TraceState.getDefault())) - .setStartEpochNanos(500) - .setEndEpochNanos(500 + 1001) - .setStatus(StatusData.error()) - .setName("testSpan2") - .setKind(SpanKind.CLIENT) - .setResource(RESOURCE) - .setInstrumentationScopeInfo( - InstrumentationScopeInfo.builder("instrumentation2").setVersion("2").build()) - .build(); + private final TestDataExporter testDataExporter = TestDataExporter.forSpans(); @RegisterExtension LogCapturer logs = LogCapturer.create().captureForType(OtlpJsonLoggingSpanExporter.class); @@ -102,85 +35,14 @@ void setUp() { @Test void log() throws Exception { - exporter.export(Arrays.asList(SPAN1, SPAN2)); + testDataExporter.export(exporter); assertThat(logs.getEvents()) .hasSize(1) .allSatisfy(log -> assertThat(log.getLevel()).isEqualTo(Level.INFO)); String message = logs.getEvents().get(0).getMessage(); - JSONAssert.assertEquals( - "{" - + " \"resource\": {" - + " \"attributes\": [{" - + " \"key\": \"key\"," - + " \"value\": {" - + " \"stringValue\": \"value\"" - + " }" - + " }]" - + " }," - + " \"scopeSpans\": [{" - + " \"scope\": {" - + " \"name\": \"instrumentation2\"," - + " \"version\": \"2\"" - + " }," - + " \"spans\": [{" - + " \"traceId\": \"12340000000043211234000000004321\"," - + " \"spanId\": \"8765000000005678\"," - + " \"name\": \"testSpan2\"," - + " \"kind\": 3," - + " \"startTimeUnixNano\": \"500\"," - + " \"endTimeUnixNano\": \"1501\"," - + " \"status\": {" - + " \"code\": 2" - + " }" - + " }]" - + " }, {" - + " \"scope\": {" - + " \"name\": \"instrumentation\"," - + " \"version\": \"1\"," - + " \"attributes\":[{" - + " \"key\":\"key\"," - + " \"value\":{" - + " \"stringValue\":\"value\"" - + " }" - + " }]" - + " }," - + " \"spans\": [{" - + " \"traceId\": \"12345678876543211234567887654321\"," - + " \"spanId\": \"8765432112345678\"," - + " \"name\": \"testSpan1\"," - + " \"kind\": 1," - + " \"startTimeUnixNano\": \"100\"," - + " \"endTimeUnixNano\": \"1100\"," - + " \"attributes\": [{" - + " \"key\": \"animal\"," - + " \"value\": {" - + " \"stringValue\": \"cat\"" - + " }" - + " }, {" - + " \"key\": \"lives\"," - + " \"value\": {" - + " \"intValue\": \"9\"" - + " }" - + " }]," - + " \"events\": [{" - + " \"timeUnixNano\": \"600\"," - + " \"name\": \"somethingHappenedHere\"," - + " \"attributes\": [{" - + " \"key\": \"important\"," - + " \"value\": {" - + " \"boolValue\": true" - + " }" - + " }]" - + " }]," - + " \"status\": {" - + " \"code\": 1" - + " }" - + " }]" - + " }]" - + "}", - message, - /* strict= */ false); + String expectedJson = testDataExporter.getExpectedJson(false); + JSONAssert.assertEquals("Got \n" + message, expectedJson, message, /* strict= */ false); 
assertThat(message).doesNotContain("\n"); } @@ -194,7 +56,7 @@ void shutdown() { assertThat(exporter.shutdown().isSuccess()).isTrue(); assertThat( exporter - .export(Collections.singletonList(SPAN1)) + .export(Collections.singletonList(TestDataExporter.SPAN1)) .join(10, TimeUnit.SECONDS) .isSuccess()) .isFalse(); diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpStdoutLogRecordExporterTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpStdoutLogRecordExporterTest.java new file mode 100644 index 00000000000..c19fba0fe3e --- /dev/null +++ b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpStdoutLogRecordExporterTest.java @@ -0,0 +1,48 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp; + +import io.opentelemetry.exporter.logging.otlp.internal.logs.OtlpStdoutLogRecordExporter; +import io.opentelemetry.exporter.logging.otlp.internal.logs.OtlpStdoutLogRecordExporterBuilder; +import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; +import java.io.OutputStream; +import java.util.logging.Logger; +import javax.annotation.Nullable; + +class OtlpStdoutLogRecordExporterTest + extends AbstractOtlpStdoutExporterTest { + + public OtlpStdoutLogRecordExporterTest() { + super( + TestDataExporter.forLogs(), + OtlpStdoutLogRecordExporter.class, + ConfigurableLogRecordExporterProvider.class, + LogRecordExporter.class, + "OtlpStdoutLogRecordExporter{jsonWriter=StreamJsonWriter{outputStream=stdout}, wrapperJsonObject=true, memoryMode=IMMUTABLE_DATA}"); + } + + @Override + protected OtlpStdoutLogRecordExporter createDefaultExporter() { + return OtlpStdoutLogRecordExporter.builder().build(); + } + + @Override + protected OtlpStdoutLogRecordExporter createExporter( + @Nullable OutputStream outputStream, MemoryMode memoryMode, boolean wrapperJsonObject) { + OtlpStdoutLogRecordExporterBuilder builder = + OtlpStdoutLogRecordExporter.builder() + .setMemoryMode(memoryMode) + .setWrapperJsonObject(wrapperJsonObject); + if (outputStream != null) { + builder.setOutput(outputStream); + } else { + builder.setOutput(Logger.getLogger(exporterClass.getName())); + } + return builder.build(); + } +} diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpStdoutMetricExporterTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpStdoutMetricExporterTest.java new file mode 100644 index 00000000000..df91f1e2cba --- /dev/null +++ b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpStdoutMetricExporterTest.java @@ -0,0 +1,126 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.google.common.collect.ImmutableMap; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.logging.otlp.internal.metrics.OtlpStdoutMetricExporter; +import 
io.opentelemetry.exporter.logging.otlp.internal.metrics.OtlpStdoutMetricExporterBuilder; +import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import java.io.OutputStream; +import java.util.logging.Logger; +import javax.annotation.Nullable; +import org.junit.jupiter.api.Test; + +class OtlpStdoutMetricExporterTest + extends AbstractOtlpStdoutExporterTest { + + public OtlpStdoutMetricExporterTest() { + super( + TestDataExporter.forMetrics(), + OtlpStdoutMetricExporter.class, + ConfigurableMetricExporterProvider.class, + MetricExporter.class, + "OtlpStdoutMetricExporter{jsonWriter=StreamJsonWriter{outputStream=stdout}, wrapperJsonObject=true, memoryMode=IMMUTABLE_DATA, aggregationTemporalitySelector=AggregationTemporalitySelector{COUNTER=CUMULATIVE, UP_DOWN_COUNTER=CUMULATIVE, HISTOGRAM=CUMULATIVE, OBSERVABLE_COUNTER=CUMULATIVE, OBSERVABLE_UP_DOWN_COUNTER=CUMULATIVE, OBSERVABLE_GAUGE=CUMULATIVE, GAUGE=CUMULATIVE}, defaultAggregationSelector=DefaultAggregationSelector{COUNTER=default, UP_DOWN_COUNTER=default, HISTOGRAM=default, OBSERVABLE_COUNTER=default, OBSERVABLE_UP_DOWN_COUNTER=default, OBSERVABLE_GAUGE=default, GAUGE=default}}"); + } + + @Override + protected OtlpStdoutMetricExporter createDefaultExporter() { + return OtlpStdoutMetricExporter.builder().build(); + } + + @Override + protected OtlpStdoutMetricExporter createExporter( + @Nullable OutputStream outputStream, MemoryMode memoryMode, boolean wrapperJsonObject) { + OtlpStdoutMetricExporterBuilder builder = + OtlpStdoutMetricExporter.builder() + .setMemoryMode(memoryMode) + .setWrapperJsonObject(wrapperJsonObject); + if (outputStream != null) { + builder.setOutput(outputStream); + } else { + builder.setOutput(Logger.getLogger(exporterClass.getName())); + } + return builder.build(); + } + + /** Test configuration specific to metric exporter. 
*/ + @Test + void providerMetricConfig() { + OtlpStdoutMetricExporter exporter = + exporterFromProvider( + DefaultConfigProperties.createFromMap( + ImmutableMap.of( + "otel.exporter.otlp.metrics.temporality.preference", + "DELTA", + "otel.exporter.otlp.metrics.default.histogram.aggregation", + "BASE2_EXPONENTIAL_BUCKET_HISTOGRAM"))); + + assertThat(exporter.getAggregationTemporality(InstrumentType.COUNTER)) + .isEqualTo(AggregationTemporality.DELTA); + + assertThat(exporter.getDefaultAggregation(InstrumentType.HISTOGRAM)) + .isEqualTo(Aggregation.base2ExponentialBucketHistogram()); + } + + @Test + void componentProviderMetricConfig() { + DeclarativeConfigProperties properties = mock(DeclarativeConfigProperties.class); + when(properties.getString("temporality_preference")).thenReturn("DELTA"); + when(properties.getString("default_histogram_aggregation")) + .thenReturn("BASE2_EXPONENTIAL_BUCKET_HISTOGRAM"); + + OtlpStdoutMetricExporter exporter = exporterFromComponentProvider(properties); + assertThat(exporter.getAggregationTemporality(InstrumentType.COUNTER)) + .isEqualTo(AggregationTemporality.DELTA); + + assertThat(exporter.getDefaultAggregation(InstrumentType.HISTOGRAM)) + .isEqualTo(Aggregation.base2ExponentialBucketHistogram()); + } + + @Test + void validMetricConfig() { + assertThatCode( + () -> + OtlpStdoutMetricExporter.builder() + .setAggregationTemporalitySelector( + AggregationTemporalitySelector.deltaPreferred())) + .doesNotThrowAnyException(); + assertThat( + OtlpStdoutMetricExporter.builder() + .setAggregationTemporalitySelector(AggregationTemporalitySelector.deltaPreferred()) + .build() + .getAggregationTemporality(InstrumentType.COUNTER)) + .isEqualTo(AggregationTemporality.DELTA); + assertThat( + OtlpStdoutMetricExporter.builder() + .build() + .getAggregationTemporality(InstrumentType.COUNTER)) + .isEqualTo(AggregationTemporality.CUMULATIVE); + + assertThat( + OtlpStdoutMetricExporter.builder() + .setDefaultAggregationSelector( + DefaultAggregationSelector.getDefault() + .with(InstrumentType.HISTOGRAM, Aggregation.drop())) + .build() + .getDefaultAggregation(InstrumentType.HISTOGRAM)) + .isEqualTo(Aggregation.drop()); + } +} diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpStdoutSpanExporterTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpStdoutSpanExporterTest.java new file mode 100644 index 00000000000..01d3a96ccd2 --- /dev/null +++ b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/OtlpStdoutSpanExporterTest.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp; + +import io.opentelemetry.exporter.logging.otlp.internal.traces.OtlpStdoutSpanExporter; +import io.opentelemetry.exporter.logging.otlp.internal.traces.OtlpStdoutSpanExporterBuilder; +import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.io.OutputStream; +import java.util.logging.Logger; +import javax.annotation.Nullable; + +class OtlpStdoutSpanExporterTest extends AbstractOtlpStdoutExporterTest { + + public OtlpStdoutSpanExporterTest() { + super( + TestDataExporter.forSpans(), + OtlpStdoutSpanExporter.class, + ConfigurableSpanExporterProvider.class, + SpanExporter.class, + 
"OtlpStdoutSpanExporter{jsonWriter=StreamJsonWriter{outputStream=stdout}, wrapperJsonObject=true, memoryMode=IMMUTABLE_DATA}"); + } + + @Override + protected OtlpStdoutSpanExporter createDefaultExporter() { + return OtlpStdoutSpanExporter.builder().build(); + } + + @Override + protected OtlpStdoutSpanExporter createExporter( + @Nullable OutputStream outputStream, MemoryMode memoryMode, boolean wrapperJsonObject) { + OtlpStdoutSpanExporterBuilder builder = + OtlpStdoutSpanExporter.builder() + .setMemoryMode(memoryMode) + .setWrapperJsonObject(wrapperJsonObject); + if (outputStream != null) { + builder.setOutput(outputStream); + } else { + builder.setOutput(Logger.getLogger(exporterClass.getName())); + } + return builder.build(); + } +} diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/TestDataExporter.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/TestDataExporter.java new file mode 100644 index 00000000000..8ef1c4ace44 --- /dev/null +++ b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/TestDataExporter.java @@ -0,0 +1,251 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp; + +import static io.opentelemetry.api.common.AttributeKey.booleanKey; +import static io.opentelemetry.api.common.AttributeKey.longKey; +import static io.opentelemetry.api.common.AttributeKey.stringKey; + +import com.google.common.io.Resources; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.logs.TestLogRecordData; +import io.opentelemetry.sdk.testing.logs.internal.TestExtendedLogRecordData; +import io.opentelemetry.sdk.testing.trace.TestSpanData; +import io.opentelemetry.sdk.trace.data.EventData; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.data.StatusData; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Collections; +import java.util.concurrent.TimeUnit; + +abstract class TestDataExporter { + + private final String expectedFileNoWrapper; + private final String expectedFileWrapper; + private static final Resource RESOURCE = + Resource.create(Attributes.builder().put("key", "value").build()); + + private static final LogRecordData LOG1 = + TestExtendedLogRecordData.builder() + .setResource(RESOURCE) + .setInstrumentationScopeInfo( + InstrumentationScopeInfo.builder("instrumentation") + .setVersion("1") + 
.setAttributes(Attributes.builder().put("key", "value").build()) + .build()) + .setEventName("event name") + .setBody("body1") + .setSeverity(Severity.INFO) + .setSeverityText("INFO") + .setTimestamp(100L, TimeUnit.NANOSECONDS) + .setObservedTimestamp(200L, TimeUnit.NANOSECONDS) + .setAttributes(Attributes.of(stringKey("animal"), "cat", longKey("lives"), 9L)) + .setTotalAttributeCount(2) + .setSpanContext( + SpanContext.create( + "12345678876543211234567887654322", + "8765432112345876", + TraceFlags.getDefault(), + TraceState.getDefault())) + .build(); + + private static final LogRecordData LOG2 = + TestLogRecordData.builder() + .setResource(RESOURCE) + .setInstrumentationScopeInfo( + InstrumentationScopeInfo.builder("instrumentation2").setVersion("2").build()) + .setBody("body2") + .setSeverity(Severity.INFO) + .setSeverityText("INFO") + .setTimestamp(100L, TimeUnit.NANOSECONDS) + .setObservedTimestamp(200L, TimeUnit.NANOSECONDS) + .setAttributes(Attributes.of(booleanKey("important"), true)) + .setTotalAttributeCount(1) + .setSpanContext( + SpanContext.create( + "12345678876543211234567887654322", + "8765432112345875", + TraceFlags.getDefault(), + TraceState.getDefault())) + .build(); + + static final SpanData SPAN1 = + TestSpanData.builder() + .setHasEnded(true) + .setSpanContext( + SpanContext.create( + "12345678876543211234567887654321", + "8765432112345678", + TraceFlags.getSampled(), + TraceState.getDefault())) + .setStartEpochNanos(100) + .setEndEpochNanos(100 + 1000) + .setStatus(StatusData.ok()) + .setName("testSpan1") + .setKind(SpanKind.INTERNAL) + .setAttributes(Attributes.of(stringKey("animal"), "cat", longKey("lives"), 9L)) + .setEvents( + Collections.singletonList( + EventData.create( + 100 + 500, + "somethingHappenedHere", + Attributes.of(booleanKey("important"), true)))) + .setTotalAttributeCount(2) + .setTotalRecordedEvents(1) + .setTotalRecordedLinks(0) + .setInstrumentationScopeInfo( + InstrumentationScopeInfo.builder("instrumentation") + .setVersion("1") + .setAttributes(Attributes.builder().put("key", "value").build()) + .build()) + .setResource(RESOURCE) + .build(); + + private static final SpanData SPAN2 = + TestSpanData.builder() + .setHasEnded(false) + .setSpanContext( + SpanContext.create( + "12340000000043211234000000004321", + "8765000000005678", + TraceFlags.getSampled(), + TraceState.getDefault())) + .setStartEpochNanos(500) + .setEndEpochNanos(500 + 1001) + .setStatus(StatusData.error()) + .setName("testSpan2") + .setKind(SpanKind.CLIENT) + .setResource(RESOURCE) + .setInstrumentationScopeInfo( + InstrumentationScopeInfo.builder("instrumentation2").setVersion("2").build()) + .build(); + + static final MetricData METRIC1 = + ImmutableMetricData.createDoubleSum( + RESOURCE, + InstrumentationScopeInfo.builder("instrumentation") + .setVersion("1") + .setAttributes(Attributes.builder().put("key", "value").build()) + .build(), + "metric1", + "metric1 description", + "m", + ImmutableSumData.create( + true, + AggregationTemporality.CUMULATIVE, + Collections.singletonList( + ImmutableDoublePointData.create( + 1, 2, Attributes.of(stringKey("cat"), "meow"), 4)))); + + private static final MetricData METRIC2 = + ImmutableMetricData.createDoubleSum( + RESOURCE, + InstrumentationScopeInfo.builder("instrumentation2").setVersion("2").build(), + "metric2", + "metric2 description", + "s", + ImmutableSumData.create( + true, + AggregationTemporality.CUMULATIVE, + Collections.singletonList( + ImmutableDoublePointData.create( + 1, 2, Attributes.of(stringKey("cat"), "meow"), 
4)))); + + public TestDataExporter(String expectedFileNoWrapper, String expectedFileWrapper) { + this.expectedFileNoWrapper = expectedFileNoWrapper; + this.expectedFileWrapper = expectedFileWrapper; + } + + public String getExpectedJson(boolean withWrapper) throws IOException { + String file = withWrapper ? expectedFileWrapper : expectedFileNoWrapper; + return Resources.toString(Resources.getResource(file), StandardCharsets.UTF_8); + } + + abstract CompletableResultCode export(T exporter); + + abstract CompletableResultCode flush(T exporter); + + abstract CompletableResultCode shutdown(T exporter); + + static TestDataExporter forLogs() { + return new TestDataExporter( + "expected-logs.json", "expected-logs-wrapper.json") { + @Override + public CompletableResultCode export(LogRecordExporter exporter) { + return exporter.export(Arrays.asList(LOG1, LOG2)); + } + + @Override + public CompletableResultCode flush(LogRecordExporter exporter) { + return exporter.flush(); + } + + @Override + public CompletableResultCode shutdown(LogRecordExporter exporter) { + return exporter.shutdown(); + } + }; + } + + static TestDataExporter forSpans() { + return new TestDataExporter( + "expected-spans.json", "expected-spans-wrapper.json") { + @Override + public CompletableResultCode export(SpanExporter exporter) { + return exporter.export(Arrays.asList(SPAN1, SPAN2)); + } + + @Override + public CompletableResultCode flush(SpanExporter exporter) { + return exporter.flush(); + } + + @Override + public CompletableResultCode shutdown(SpanExporter exporter) { + return exporter.shutdown(); + } + }; + } + + static TestDataExporter forMetrics() { + return new TestDataExporter( + "expected-metrics.json", "expected-metrics-wrapper.json") { + @Override + public CompletableResultCode export(MetricExporter exporter) { + return exporter.export(Arrays.asList(METRIC1, METRIC2)); + } + + @Override + public CompletableResultCode flush(MetricExporter exporter) { + return exporter.flush(); + } + + @Override + public CompletableResultCode shutdown(MetricExporter exporter) { + return exporter.shutdown(); + } + }; + } +} diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingExporterProviderTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingExporterProviderTest.java deleted file mode 100644 index decd00aec4b..00000000000 --- a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/internal/LoggingExporterProviderTest.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.logging.otlp.internal; - -import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; - -import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingLogRecordExporter; -import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingMetricExporter; -import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingSpanExporter; -import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; -import java.util.Collections; -import org.junit.jupiter.api.Test; - -class LoggingExporterProviderTest { - - @Test - void logRecordExporterProvider() { - LoggingLogRecordExporterProvider provider = new LoggingLogRecordExporterProvider(); - assertThat(provider.getName()).isEqualTo("logging-otlp"); - assertThat( - provider.createExporter(DefaultConfigProperties.createFromMap(Collections.emptyMap()))) - 
.isInstanceOf(OtlpJsonLoggingLogRecordExporter.class); - } - - @Test - void metricExporterProvider() { - LoggingMetricExporterProvider provider = new LoggingMetricExporterProvider(); - assertThat(provider.getName()).isEqualTo("logging-otlp"); - assertThat( - provider.createExporter(DefaultConfigProperties.createFromMap(Collections.emptyMap()))) - .isInstanceOf(OtlpJsonLoggingMetricExporter.class); - } - - @Test - void spanExporterProvider() { - LoggingSpanExporterProvider provider = new LoggingSpanExporterProvider(); - assertThat(provider.getName()).isEqualTo("logging-otlp"); - assertThat( - provider.createExporter(DefaultConfigProperties.createFromMap(Collections.emptyMap()))) - .isInstanceOf(OtlpJsonLoggingSpanExporter.class); - } -} diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/internal/writer/LoggerJsonWriterTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/internal/writer/LoggerJsonWriterTest.java new file mode 100644 index 00000000000..680c7cc444d --- /dev/null +++ b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/internal/writer/LoggerJsonWriterTest.java @@ -0,0 +1,48 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.writer; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; + +import com.fasterxml.jackson.core.JsonGenerator; +import io.github.netmikey.logunit.api.LogCapturer; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; +import java.io.IOException; +import java.util.logging.Logger; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockito.Mockito; + +@SuppressLogger(LoggerJsonWriter.class) +class LoggerJsonWriterTest { + + @RegisterExtension + static final LogCapturer logs = LogCapturer.create().captureForType(LoggerJsonWriter.class); + + @Test + void testToString() { + LoggerJsonWriter writer = new LoggerJsonWriter(null, "type"); + assertThat(writer.toString()).isEqualTo("LoggerJsonWriter"); + } + + @Test + void error() throws IOException { + Marshaler marshaler = mock(Marshaler.class); + Mockito.doThrow(new IOException("test")) + .when(marshaler) + .writeJsonToGenerator(any(JsonGenerator.class)); + + Logger logger = Logger.getLogger(LoggerJsonWriter.class.getName()); + + LoggerJsonWriter writer = new LoggerJsonWriter(logger, "type"); + writer.write(marshaler); + + logs.assertContains("Unable to write OTLP JSON type"); + } +} diff --git a/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/internal/writer/StreamJsonWriterTest.java b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/internal/writer/StreamJsonWriterTest.java new file mode 100644 index 00000000000..048fcab1c17 --- /dev/null +++ b/exporters/logging-otlp/src/test/java/io/opentelemetry/exporter/logging/otlp/internal/writer/StreamJsonWriterTest.java @@ -0,0 +1,76 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.otlp.internal.writer; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; + +import com.fasterxml.jackson.core.JsonGenerator; +import 
io.github.netmikey.logunit.api.LogCapturer; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.api.io.TempDir; +import org.mockito.Mockito; + +@SuppressWarnings("SystemOut") +@SuppressLogger(StreamJsonWriter.class) +class StreamJsonWriterTest { + + @RegisterExtension + static final LogCapturer logs = LogCapturer.create().captureForType(StreamJsonWriter.class); + + @TempDir Path tempDir; + + @Test + @SuppressWarnings("SystemOut") + void testToString() throws IOException { + assertThat( + new StreamJsonWriter(Files.newOutputStream(tempDir.resolve("foo")), "type").toString()) + .startsWith("StreamJsonWriter{outputStream=") + .contains("Channel"); + assertThat(new StreamJsonWriter(System.out, "type").toString()) + .isEqualTo("StreamJsonWriter{outputStream=stdout}"); + assertThat(new StreamJsonWriter(System.err, "type").toString()) + .isEqualTo("StreamJsonWriter{outputStream=stderr}"); + } + + @Test + void errorWriting() throws IOException { + Marshaler marshaler = mock(Marshaler.class); + Mockito.doThrow(new IOException("test")) + .when(marshaler) + .writeJsonWithNewline(any(JsonGenerator.class)); + + StreamJsonWriter writer = new StreamJsonWriter(System.out, "type"); + writer.write(marshaler); + + logs.assertContains("Unable to write OTLP JSON type"); + } + + @Test + void errorFlushing() { + OutputStream outputStream = + new FilterOutputStream(System.out) { + @Override + public void flush() throws IOException { + throw new IOException("No flush"); + } + }; + + StreamJsonWriter writer = new StreamJsonWriter(outputStream, "type"); + writer.flush(); + + logs.assertContains("Failed to flush items"); + } +} diff --git a/exporters/logging-otlp/src/test/resources/expected-logs-wrapper.json b/exporters/logging-otlp/src/test/resources/expected-logs-wrapper.json new file mode 100644 index 00000000000..65767985bc2 --- /dev/null +++ b/exporters/logging-otlp/src/test/resources/expected-logs-wrapper.json @@ -0,0 +1,88 @@ +{ + "resourceLogs": [ + { + "resource": { + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "scopeLogs": [ + { + "scope": { + "name": "instrumentation", + "version": "1", + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "logRecords": [ + { + "eventName": "event name", + "timeUnixNano": "100", + "observedTimeUnixNano": "200", + "severityNumber": 9, + "severityText": "INFO", + "body": { + "stringValue": "body1" + }, + "attributes": [ + { + "key": "animal", + "value": { + "stringValue": "cat" + } + }, + { + "key": "lives", + "value": { + "intValue": "9" + } + } + ], + "traceId": "12345678876543211234567887654322", + "spanId": "8765432112345876" + } + ] + }, + { + "scope": { + "name": "instrumentation2", + "version": "2", + "attributes": [] + }, + "logRecords": [ + { + "timeUnixNano": "100", + "observedTimeUnixNano": "200", + "severityNumber": 9, + "severityText": "INFO", + "body": { + "stringValue": "body2" + }, + "attributes": [ + { + "key": "important", + "value": { + "boolValue": true + } + } + ], + "traceId": "12345678876543211234567887654322", + "spanId": "8765432112345875" + } + ] + } + ] + } + ] +} diff --git 
a/exporters/logging-otlp/src/test/resources/expected-logs.json b/exporters/logging-otlp/src/test/resources/expected-logs.json new file mode 100644 index 00000000000..f9bb36dd273 --- /dev/null +++ b/exporters/logging-otlp/src/test/resources/expected-logs.json @@ -0,0 +1,85 @@ +{ + "resource": { + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "scopeLogs": [ + { + "scope": { + "name": "instrumentation", + "version": "1", + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "logRecords": [ + { + "eventName": "event name", + "timeUnixNano": "100", + "observedTimeUnixNano": "200", + "severityNumber": 9, + "severityText": "INFO", + "body": { + "stringValue": "body1" + }, + "attributes": [ + { + "key": "animal", + "value": { + "stringValue": "cat" + } + }, + { + "key": "lives", + "value": { + "intValue": "9" + } + } + ], + "traceId": "12345678876543211234567887654322", + "spanId": "8765432112345876" + } + ] + }, + { + "scope": { + "name": "instrumentation2", + "version": "2", + "attributes": [] + }, + "logRecords": [ + { + "timeUnixNano": "100", + "observedTimeUnixNano": "200", + "severityNumber": 9, + "severityText": "INFO", + "body": { + "stringValue": "body2" + }, + "attributes": [ + { + "key": "important", + "value": { + "boolValue": true + } + } + ], + "traceId": "12345678876543211234567887654322", + "spanId": "8765432112345875" + } + ] + } + ] +} + diff --git a/exporters/logging-otlp/src/test/resources/expected-metrics-wrapper.json b/exporters/logging-otlp/src/test/resources/expected-metrics-wrapper.json new file mode 100644 index 00000000000..9c1255a6279 --- /dev/null +++ b/exporters/logging-otlp/src/test/resources/expected-metrics-wrapper.json @@ -0,0 +1,93 @@ +{ + "resourceMetrics": [ + { + "resource": { + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "scopeMetrics": [ + { + "scope": { + "name": "instrumentation", + "version": "1", + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "metrics": [ + { + "name": "metric1", + "description": "metric1 description", + "unit": "m", + "sum": { + "dataPoints": [ + { + "startTimeUnixNano": "1", + "timeUnixNano": "2", + "asDouble": 4.0, + "exemplars": [], + "attributes": [ + { + "key": "cat", + "value": { + "stringValue": "meow" + } + } + ] + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + } + ] + }, + { + "scope": { + "name": "instrumentation2", + "version": "2", + "attributes": [] + }, + "metrics": [ + { + "name": "metric2", + "description": "metric2 description", + "unit": "s", + "sum": { + "dataPoints": [ + { + "startTimeUnixNano": "1", + "timeUnixNano": "2", + "asDouble": 4.0, + "exemplars": [], + "attributes": [ + { + "key": "cat", + "value": { + "stringValue": "meow" + } + } + ] + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + } + ] + } + ] + } + ] +} diff --git a/exporters/logging-otlp/src/test/resources/expected-metrics.json b/exporters/logging-otlp/src/test/resources/expected-metrics.json new file mode 100644 index 00000000000..1a05a682e56 --- /dev/null +++ b/exporters/logging-otlp/src/test/resources/expected-metrics.json @@ -0,0 +1,89 @@ +{ + "resource": { + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "scopeMetrics": [ + { + "scope": { + "name": "instrumentation", + "version": "1", + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + 
"metrics": [ + { + "name": "metric1", + "description": "metric1 description", + "unit": "m", + "sum": { + "dataPoints": [ + { + "startTimeUnixNano": "1", + "timeUnixNano": "2", + "asDouble": 4.0, + "exemplars": [], + "attributes": [ + { + "key": "cat", + "value": { + "stringValue": "meow" + } + } + ] + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + } + ] + }, + { + "scope": { + "name": "instrumentation2", + "version": "2", + "attributes": [] + }, + "metrics": [ + { + "name": "metric2", + "description": "metric2 description", + "unit": "s", + "sum": { + "dataPoints": [ + { + "startTimeUnixNano": "1", + "timeUnixNano": "2", + "asDouble": 4.0, + "exemplars": [], + "attributes": [ + { + "key": "cat", + "value": { + "stringValue": "meow" + } + } + ] + } + ], + "aggregationTemporality": 2, + "isMonotonic": true + } + } + ] + } + ] +} diff --git a/exporters/logging-otlp/src/test/resources/expected-spans-wrapper.json b/exporters/logging-otlp/src/test/resources/expected-spans-wrapper.json new file mode 100644 index 00000000000..f1dd80ed9e8 --- /dev/null +++ b/exporters/logging-otlp/src/test/resources/expected-spans-wrapper.json @@ -0,0 +1,99 @@ +{ + "resourceSpans": [ + { + "resource": { + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "scopeSpans": [ + { + "scope": { + "name": "instrumentation", + "version": "1", + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "spans": [ + { + "traceId": "12345678876543211234567887654321", + "spanId": "8765432112345678", + "name": "testSpan1", + "kind": 1, + "startTimeUnixNano": "100", + "endTimeUnixNano": "1100", + "attributes": [ + { + "key": "animal", + "value": { + "stringValue": "cat" + } + }, + { + "key": "lives", + "value": { + "intValue": "9" + } + } + ], + "events": [ + { + "timeUnixNano": "600", + "name": "somethingHappenedHere", + "attributes": [ + { + "key": "important", + "value": { + "boolValue": true + } + } + ] + } + ], + "links": [], + "status": { + "code": 1 + }, + "flags": 257 + } + ] + }, + { + "scope": { + "name": "instrumentation2", + "version": "2", + "attributes": [] + }, + "spans": [ + { + "traceId": "12340000000043211234000000004321", + "spanId": "8765000000005678", + "name": "testSpan2", + "kind": 3, + "startTimeUnixNano": "500", + "endTimeUnixNano": "1501", + "attributes": [], + "events": [], + "links": [], + "status": { + "code": 2 + }, + "flags": 257 + } + ] + } + ] + } + ] +} diff --git a/exporters/logging-otlp/src/test/resources/expected-spans.json b/exporters/logging-otlp/src/test/resources/expected-spans.json new file mode 100644 index 00000000000..22e57949d90 --- /dev/null +++ b/exporters/logging-otlp/src/test/resources/expected-spans.json @@ -0,0 +1,95 @@ +{ + "resource": { + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "scopeSpans": [ + { + "scope": { + "name": "instrumentation", + "version": "1", + "attributes": [ + { + "key": "key", + "value": { + "stringValue": "value" + } + } + ] + }, + "spans": [ + { + "traceId": "12345678876543211234567887654321", + "spanId": "8765432112345678", + "name": "testSpan1", + "kind": 1, + "startTimeUnixNano": "100", + "endTimeUnixNano": "1100", + "attributes": [ + { + "key": "animal", + "value": { + "stringValue": "cat" + } + }, + { + "key": "lives", + "value": { + "intValue": "9" + } + } + ], + "events": [ + { + "timeUnixNano": "600", + "name": "somethingHappenedHere", + "attributes": [ + { + "key": "important", + "value": { + 
"boolValue": true + } + } + ] + } + ], + "links": [], + "status": { + "code": 1 + }, + "flags": 257 + } + ] + }, + { + "scope": { + "name": "instrumentation2", + "version": "2", + "attributes": [] + }, + "spans": [ + { + "traceId": "12340000000043211234000000004321", + "spanId": "8765000000005678", + "name": "testSpan2", + "kind": 3, + "startTimeUnixNano": "500", + "endTimeUnixNano": "1501", + "attributes": [], + "events": [], + "links": [], + "status": { + "code": 2 + }, + "flags": 257 + } + ] + } + ] +} diff --git a/exporters/logging/build.gradle.kts b/exporters/logging/build.gradle.kts index 6feefb00fe8..82e96771834 100644 --- a/exporters/logging/build.gradle.kts +++ b/exporters/logging/build.gradle.kts @@ -12,6 +12,7 @@ dependencies { api(project(":sdk:all")) implementation(project(":sdk-extensions:autoconfigure-spi")) + compileOnly(project(":api:incubator")) testImplementation(project(":sdk:testing")) } diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/SystemOutLogRecordExporter.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/SystemOutLogRecordExporter.java index 6655c12453a..3848bf0225a 100644 --- a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/SystemOutLogRecordExporter.java +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/SystemOutLogRecordExporter.java @@ -7,6 +7,7 @@ import static java.util.concurrent.TimeUnit.NANOSECONDS; +import io.opentelemetry.api.common.Value; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.logs.data.LogRecordData; @@ -63,6 +64,7 @@ public CompletableResultCode flush() { // VisibleForTesting static void formatLog(StringBuilder stringBuilder, LogRecordData log) { InstrumentationScopeInfo instrumentationScopeInfo = log.getInstrumentationScopeInfo(); + Value body = log.getBodyValue(); stringBuilder .append( ISO_FORMAT.format( @@ -71,7 +73,7 @@ static void formatLog(StringBuilder stringBuilder, LogRecordData log) { .append(" ") .append(log.getSeverity()) .append(" '") - .append(log.getBody().asString()) + .append(body == null ? "" : body.asString()) .append("' : ") .append(log.getSpanContext().getTraceId()) .append(" ") diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleLogRecordExporterComponentProvider.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleLogRecordExporterComponentProvider.java new file mode 100644 index 00000000000..2d8141cc4f2 --- /dev/null +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleLogRecordExporterComponentProvider.java @@ -0,0 +1,36 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.logging.SystemOutLogRecordExporter; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; + +/** + * Declarative configuration SPI implementation for {@link SystemOutLogRecordExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ConsoleLogRecordExporterComponentProvider + implements ComponentProvider { + + @Override + public Class getType() { + return LogRecordExporter.class; + } + + @Override + public String getName() { + return "console"; + } + + @Override + public LogRecordExporter create(DeclarativeConfigProperties config) { + return SystemOutLogRecordExporter.create(); + } +} diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleLogRecordExporterProvider.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleLogRecordExporterProvider.java new file mode 100644 index 00000000000..e9911c8fd86 --- /dev/null +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleLogRecordExporterProvider.java @@ -0,0 +1,30 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.internal; + +import io.opentelemetry.exporter.logging.SystemOutLogRecordExporter; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; + +/** + * {@link LogRecordExporter} SPI implementation for {@link SystemOutLogRecordExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ConsoleLogRecordExporterProvider + implements ConfigurableLogRecordExporterProvider { + @Override + public LogRecordExporter createExporter(ConfigProperties config) { + return SystemOutLogRecordExporter.create(); + } + + @Override + public String getName() { + return "console"; + } +} diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleMetricExporterComponentProvider.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleMetricExporterComponentProvider.java new file mode 100644 index 00000000000..6fab453403f --- /dev/null +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleMetricExporterComponentProvider.java @@ -0,0 +1,36 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.logging.LoggingMetricExporter; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.metrics.export.MetricExporter; + +/** + * Declarative configuration SPI implementation for {@link LoggingMetricExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ConsoleMetricExporterComponentProvider + implements ComponentProvider { + + @Override + public Class getType() { + return MetricExporter.class; + } + + @Override + public String getName() { + return "console"; + } + + @Override + public MetricExporter create(DeclarativeConfigProperties config) { + return LoggingMetricExporter.create(); + } +} diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleMetricExporterProvider.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleMetricExporterProvider.java new file mode 100644 index 00000000000..66983914a56 --- /dev/null +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleMetricExporterProvider.java @@ -0,0 +1,29 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.internal; + +import io.opentelemetry.exporter.logging.LoggingMetricExporter; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider; +import io.opentelemetry.sdk.metrics.export.MetricExporter; + +/** + * {@link MetricExporter} SPI implementation for {@link LoggingMetricExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ConsoleMetricExporterProvider implements ConfigurableMetricExporterProvider { + @Override + public MetricExporter createExporter(ConfigProperties config) { + return LoggingMetricExporter.create(); + } + + @Override + public String getName() { + return "console"; + } +} diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleSpanExporterComponentProvider.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleSpanExporterComponentProvider.java new file mode 100644 index 00000000000..fd65a5acad8 --- /dev/null +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleSpanExporterComponentProvider.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.logging.LoggingSpanExporter; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.trace.export.SpanExporter; + +/** + * Declarative configuration SPI implementation for {@link LoggingSpanExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ConsoleSpanExporterComponentProvider implements ComponentProvider { + + @Override + public Class getType() { + return SpanExporter.class; + } + + @Override + public String getName() { + return "console"; + } + + @Override + public SpanExporter create(DeclarativeConfigProperties config) { + return LoggingSpanExporter.create(); + } +} diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleSpanExporterProvider.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleSpanExporterProvider.java new file mode 100644 index 00000000000..220bd2d6ef0 --- /dev/null +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/ConsoleSpanExporterProvider.java @@ -0,0 +1,29 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.logging.internal; + +import io.opentelemetry.exporter.logging.LoggingSpanExporter; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider; +import io.opentelemetry.sdk.trace.export.SpanExporter; + +/** + * {@link SpanExporter} SPI implementation for {@link LoggingSpanExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ConsoleSpanExporterProvider implements ConfigurableSpanExporterProvider { + @Override + public SpanExporter createExporter(ConfigProperties config) { + return LoggingSpanExporter.create(); + } + + @Override + public String getName() { + return "console"; + } +} diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingLogRecordExporterProvider.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingLogRecordExporterProvider.java index e6fbd6fbc8d..d84c4a9c0e8 100644 --- a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingLogRecordExporterProvider.java +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingLogRecordExporterProvider.java @@ -15,8 +15,13 @@ * *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. + * + * @deprecated The name {@code logging} is a deprecated alias for {@code console}, which is provided + * via {@link ConsoleLogRecordExporterProvider}. */ -public class LoggingLogRecordExporterProvider implements ConfigurableLogRecordExporterProvider { +@Deprecated +public final class LoggingLogRecordExporterProvider + implements ConfigurableLogRecordExporterProvider { @Override public LogRecordExporter createExporter(ConfigProperties config) { return SystemOutLogRecordExporter.create(); diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingMetricExporterProvider.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingMetricExporterProvider.java index 47605ac1154..479e7625202 100644 --- a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingMetricExporterProvider.java +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingMetricExporterProvider.java @@ -15,8 +15,12 @@ * *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. + * + * @deprecated The name {@code logging} is a deprecated alias for {@code console}, which is provided + * via {@link ConsoleMetricExporterProvider}. */ -public class LoggingMetricExporterProvider implements ConfigurableMetricExporterProvider { +@Deprecated +public final class LoggingMetricExporterProvider implements ConfigurableMetricExporterProvider { @Override public MetricExporter createExporter(ConfigProperties config) { return LoggingMetricExporter.create(); diff --git a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingSpanExporterProvider.java b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingSpanExporterProvider.java index 5854a12fe64..24f40ce1867 100644 --- a/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingSpanExporterProvider.java +++ b/exporters/logging/src/main/java/io/opentelemetry/exporter/logging/internal/LoggingSpanExporterProvider.java @@ -15,8 +15,12 @@ * *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. + * + * @deprecated The name {@code logging} is a deprecated alias for {@code console}, which is provided + * via {@link ConsoleSpanExporterProvider}. */ -public class LoggingSpanExporterProvider implements ConfigurableSpanExporterProvider { +@Deprecated +public final class LoggingSpanExporterProvider implements ConfigurableSpanExporterProvider { @Override public SpanExporter createExporter(ConfigProperties config) { return LoggingSpanExporter.create(); diff --git a/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider new file mode 100644 index 00000000000..ed0c3a77f5e --- /dev/null +++ b/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider @@ -0,0 +1,3 @@ +io.opentelemetry.exporter.logging.internal.ConsoleMetricExporterComponentProvider +io.opentelemetry.exporter.logging.internal.ConsoleSpanExporterComponentProvider +io.opentelemetry.exporter.logging.internal.ConsoleLogRecordExporterComponentProvider diff --git a/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider b/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider index 8d8842825ba..29f8e2db2da 100644 --- a/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider +++ b/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider @@ -1 +1,2 @@ io.opentelemetry.exporter.logging.internal.LoggingLogRecordExporterProvider +io.opentelemetry.exporter.logging.internal.ConsoleLogRecordExporterProvider diff --git a/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider b/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider index 3ad21a55ccd..3bdcd20b33f 100644 --- a/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider +++ b/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider @@ -1 +1,2 @@ io.opentelemetry.exporter.logging.internal.LoggingMetricExporterProvider +io.opentelemetry.exporter.logging.internal.ConsoleMetricExporterProvider diff --git a/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider b/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider index 8806e4b9608..682519c7938 100644 --- a/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider +++ b/exporters/logging/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider @@ -1 +1,2 @@ io.opentelemetry.exporter.logging.internal.LoggingSpanExporterProvider 
+io.opentelemetry.exporter.logging.internal.ConsoleSpanExporterProvider diff --git a/exporters/otlp/all/build.gradle.kts b/exporters/otlp/all/build.gradle.kts index e7b87b9efa9..eb9ad982c6f 100644 --- a/exporters/otlp/all/build.gradle.kts +++ b/exporters/otlp/all/build.gradle.kts @@ -20,12 +20,12 @@ dependencies { implementation(project(":exporters:sender:okhttp")) implementation(project(":sdk-extensions:autoconfigure-spi")) + compileOnly(project(":api:incubator")) + compileOnly("io.grpc:grpc-stub") testImplementation(project(":exporters:otlp:testing-internal")) testImplementation("com.linecorp.armeria:armeria-junit5") - testImplementation("com.google.api.grpc:proto-google-common-protos") - testImplementation("com.squareup.okhttp3:okhttp-tls") testImplementation("io.grpc:grpc-stub") jmhImplementation(project(":sdk:testing")) @@ -41,6 +41,45 @@ val testJavaVersion: String? by project testing { suites { + listOf( + "LATEST", + "4.11.0" + ).forEach { + register("testOkHttpVersion$it") { + sources { + java { + setSrcDirs(listOf("src/testDefaultSender/java")) + } + } + dependencies { + implementation(project(":exporters:sender:okhttp")) + implementation(project(":exporters:otlp:testing-internal")) + + implementation(platform("com.squareup.okhttp3:okhttp-bom")) { + // Only impose dependency constraint if not testing the LATEST version, which is defined in /dependencyManagement/build.gradle.kts + if (!it.equals("LATEST")) { + version { + strictly(it) + } + } + } + + implementation("com.squareup.okhttp3:okhttp") + implementation("io.grpc:grpc-stub") + } + + targets { + all { + testTask { + // Only enable test suite for non-LATEST in GitHub CI (CI=true) + enabled = it.equals("LATEST") || "true".equals(System.getenv("CI")) + systemProperty("expected.okhttp.version", it) + } + } + } + } + } + register("testGrpcNetty") { dependencies { implementation(project(":exporters:sender:grpc-managed-channel")) diff --git a/exporters/otlp/all/src/jmh/java/io/opentelemetry/exporter/otlp/trace/OltpExporterBenchmark.java b/exporters/otlp/all/src/jmh/java/io/opentelemetry/exporter/otlp/trace/OltpExporterBenchmark.java index 9271ad9fc7e..f45272a4bca 100644 --- a/exporters/otlp/all/src/jmh/java/io/opentelemetry/exporter/otlp/trace/OltpExporterBenchmark.java +++ b/exporters/otlp/all/src/jmh/java/io/opentelemetry/exporter/otlp/trace/OltpExporterBenchmark.java @@ -15,6 +15,7 @@ import io.opentelemetry.exporter.internal.grpc.GrpcExporter; import io.opentelemetry.exporter.internal.http.HttpExporter; import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.exporter.internal.otlp.traces.TraceRequestMarshaler; import io.opentelemetry.exporter.sender.grpc.managedchannel.internal.UpstreamGrpcSender; import io.opentelemetry.exporter.sender.okhttp.internal.OkHttpGrpcSender; @@ -44,6 +45,7 @@ @Measurement(iterations = 20, time = 1) @Fork(1) @State(Scope.Benchmark) +@SuppressWarnings("NonFinalStaticField") public class OltpExporterBenchmark { private static final Server server = Server.builder() @@ -66,7 +68,7 @@ public void export( private static ManagedChannel defaultGrpcChannel; - private static GrpcExporter upstreamGrpcExporter; + private static GrpcExporter upstreamGrpcExporter; private static GrpcExporter okhttpGrpcSender; private static HttpExporter httpExporter; @@ -83,7 +85,11 @@ public void setUp() { "otlp", "span", new UpstreamGrpcSender<>( - MarshalerTraceServiceGrpc.newFutureStub(defaultGrpcChannel, null), 10), + 
MarshalerTraceServiceGrpc.newFutureStub(defaultGrpcChannel, null), + /* shutdownChannel= */ false, + 10, + Collections::emptyMap, + null), MeterProvider::noop); okhttpGrpcSender = @@ -94,9 +100,11 @@ public void setUp() { URI.create("http://localhost:" + server.activeLocalPort()) .resolve(OtlpGrpcSpanExporterBuilder.GRPC_ENDPOINT_PATH) .toString(), - false, + null, 10, - Collections.emptyMap(), + 10, + Collections::emptyMap, + null, null, null, null), diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporter.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporter.java index de48a358f41..dcc8b4bcc7a 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporter.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporter.java @@ -7,11 +7,14 @@ import io.opentelemetry.exporter.internal.http.HttpExporter; import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.logs.LogsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.otlp.logs.LogReusableDataMarshaler; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.logs.data.LogRecordData; import io.opentelemetry.sdk.logs.export.LogRecordExporter; import java.util.Collection; +import java.util.StringJoiner; import javax.annotation.concurrent.ThreadSafe; /** @@ -22,14 +25,17 @@ @ThreadSafe public final class OtlpHttpLogRecordExporter implements LogRecordExporter { - private final HttpExporterBuilder builder; - private final HttpExporter delegate; + private final HttpExporterBuilder builder; + private final HttpExporter delegate; + private final LogReusableDataMarshaler marshaler; OtlpHttpLogRecordExporter( - HttpExporterBuilder builder, - HttpExporter delegate) { + HttpExporterBuilder builder, + HttpExporter delegate, + MemoryMode memoryMode) { this.builder = builder; this.delegate = delegate; + this.marshaler = new LogReusableDataMarshaler(memoryMode, delegate::export); } /** @@ -61,7 +67,7 @@ public static OtlpHttpLogRecordExporterBuilder builder() { * @since 1.29.0 */ public OtlpHttpLogRecordExporterBuilder toBuilder() { - return new OtlpHttpLogRecordExporterBuilder(builder.copy()); + return new OtlpHttpLogRecordExporterBuilder(builder.copy(), marshaler.getMemoryMode()); } /** @@ -72,8 +78,7 @@ public OtlpHttpLogRecordExporterBuilder toBuilder() { */ @Override public CompletableResultCode export(Collection logs) { - LogsRequestMarshaler exportRequest = LogsRequestMarshaler.create(logs); - return delegate.export(exportRequest, logs.size()); + return marshaler.export(logs); } @Override @@ -89,6 +94,9 @@ public CompletableResultCode shutdown() { @Override public String toString() { - return "OtlpHttpLogRecordExporter{" + builder.toString(false) + "}"; + StringJoiner joiner = new StringJoiner(", ", "OtlpHttpLogRecordExporter{", "}"); + joiner.add(builder.toString(false)); + joiner.add("memoryMode=" + marshaler.getMemoryMode()); + return joiner.toString(); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporterBuilder.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporterBuilder.java index cb74e9859a8..1ba267c7235 100644 --- 
a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporterBuilder.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporterBuilder.java @@ -10,12 +10,21 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.exporter.internal.compression.Compressor; +import io.opentelemetry.exporter.internal.compression.CompressorProvider; +import io.opentelemetry.exporter.internal.compression.CompressorUtil; import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.logs.LogsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.exporter.otlp.internal.OtlpUserAgent; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -27,16 +36,19 @@ public final class OtlpHttpLogRecordExporterBuilder { private static final String DEFAULT_ENDPOINT = "http://localhost:4318/v1/logs"; + private static final MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.REUSABLE_DATA; - private final HttpExporterBuilder delegate; + private final HttpExporterBuilder delegate; + private MemoryMode memoryMode; - OtlpHttpLogRecordExporterBuilder(HttpExporterBuilder delegate) { + OtlpHttpLogRecordExporterBuilder(HttpExporterBuilder delegate, MemoryMode memoryMode) { this.delegate = delegate; - OtlpUserAgent.addUserAgentHeader(delegate::addHeader); + this.memoryMode = memoryMode; + OtlpUserAgent.addUserAgentHeader(delegate::addConstantHeaders); } OtlpHttpLogRecordExporterBuilder() { - this(new HttpExporterBuilder<>("otlp", "log", DEFAULT_ENDPOINT)); + this(new HttpExporterBuilder<>("otlp", "log", DEFAULT_ENDPOINT), DEFAULT_MEMORY_MODE); } /** @@ -59,6 +71,30 @@ public OtlpHttpLogRecordExporterBuilder setTimeout(Duration timeout) { return setTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); } + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value HttpExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.33.0 + */ + public OtlpHttpLogRecordExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + requireNonNull(unit, "unit"); + checkArgument(timeout >= 0, "timeout must be non-negative"); + delegate.setConnectTimeout(timeout, unit); + return this; + } + + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value HttpExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.33.0 + */ + public OtlpHttpLogRecordExporterBuilder setConnectTimeout(Duration timeout) { + requireNonNull(timeout, "timeout"); + return setConnectTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); + } + /** * Sets the OTLP endpoint to connect to. If unset, defaults to {@value DEFAULT_ENDPOINT}. The * endpoint must start with either http:// or https://, and include the full HTTP path. @@ -70,21 +106,34 @@ public OtlpHttpLogRecordExporterBuilder setEndpoint(String endpoint) { } /** - * Sets the method used to compress payloads. If unset, compression is disabled. 
Currently - * supported compression methods include "gzip" and "none". + * Sets the method used to compress payloads. If unset, compression is disabled. Compression + * method "gzip" and "none" are supported out of the box. Support for additional compression + * methods is available by implementing {@link Compressor} and {@link CompressorProvider}. */ public OtlpHttpLogRecordExporterBuilder setCompression(String compressionMethod) { requireNonNull(compressionMethod, "compressionMethod"); - checkArgument( - compressionMethod.equals("gzip") || compressionMethod.equals("none"), - "Unsupported compression method. Supported compression methods include: gzip, none."); - delegate.setCompression(compressionMethod); + Compressor compressor = CompressorUtil.validateAndResolveCompressor(compressionMethod); + delegate.setCompression(compressor); return this; } - /** Add header to requests. */ + /** + * Add a constant header to requests. If the {@code key} collides with another constant header + * name or a one from {@link #setHeaders(Supplier)}, the values from both are included. + */ public OtlpHttpLogRecordExporterBuilder addHeader(String key, String value) { - delegate.addHeader(key, value); + delegate.addConstantHeaders(key, value); + return this; + } + + /** + * Set the supplier of headers to add to requests. If a key from the map collides with a constant + * from {@link #addHeader(String, String)}, the values from both are included. + * + * @since 1.33.0 + */ + public OtlpHttpLogRecordExporterBuilder setHeaders(Supplier> headerSupplier) { + delegate.setHeadersSupplier(headerSupplier); return this; } @@ -119,23 +168,87 @@ public OtlpHttpLogRecordExporterBuilder setSslContext( } /** - * Ses the retry policy. Retry is disabled by default. + * Set the retry policy, or {@code null} to disable retry. Retry policy is {@link + * RetryPolicy#getDefault()} by default * * @since 1.28.0 */ - public OtlpHttpLogRecordExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { - requireNonNull(retryPolicy, "retryPolicy"); + public OtlpHttpLogRecordExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { delegate.setRetryPolicy(retryPolicy); return this; } + /** + * Sets the proxy options. Proxying is disabled by default. + * + * @since 1.36.0 + */ + public OtlpHttpLogRecordExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + requireNonNull(proxyOptions, "proxyOptions"); + delegate.setProxyOptions(proxyOptions); + return this; + } + /** * Sets the {@link MeterProvider} to use to collect metrics related to export. If not set, uses * {@link GlobalOpenTelemetry#getMeterProvider()}. */ public OtlpHttpLogRecordExporterBuilder setMeterProvider(MeterProvider meterProvider) { requireNonNull(meterProvider, "meterProvider"); - delegate.setMeterProvider(meterProvider); + setMeterProvider(() -> meterProvider); + return this; + } + + /** + * Sets the {@link MeterProvider} supplier used to collect metrics related to export. If not set, + * uses {@link GlobalOpenTelemetry#getMeterProvider()}. + * + * @since 1.32.0 + */ + public OtlpHttpLogRecordExporterBuilder setMeterProvider( + Supplier meterProviderSupplier) { + requireNonNull(meterProviderSupplier, "meterProviderSupplier"); + delegate.setMeterProvider(meterProviderSupplier); + return this; + } + + /** + * Set the {@link MemoryMode}. If unset, defaults to {@link #DEFAULT_MEMORY_MODE}. + * + *
<p>
When memory mode is {@link MemoryMode#REUSABLE_DATA}, serialization is optimized to reduce + * memory allocation. + * + * @since 1.39.0 + */ + public OtlpHttpLogRecordExporterBuilder setMemoryMode(MemoryMode memoryMode) { + requireNonNull(memoryMode, "memoryMode"); + this.memoryMode = memoryMode; + return this; + } + + /** + * Set the {@link ClassLoader} used to load the sender API. + * + * @since 1.48.0 + */ + public OtlpHttpLogRecordExporterBuilder setServiceClassLoader(ClassLoader serviceClassLoader) { + requireNonNull(serviceClassLoader, "serviceClassLoader"); + delegate.setServiceClassLoader(serviceClassLoader); + return this; + } + + /** + * Set the {@link ExecutorService} used to execute requests. + * + *
<p>
NOTE: By calling this method, you are opting into managing the lifecycle of the {@code + * executorService}. {@link ExecutorService#shutdown()} will NOT be called when this exporter is + * shutdown. + * + * @since 1.49.0 + */ + public OtlpHttpLogRecordExporterBuilder setExecutorService(ExecutorService executorService) { + requireNonNull(executorService, "executorService"); + delegate.setExecutorService(executorService); return this; } @@ -145,6 +258,6 @@ public OtlpHttpLogRecordExporterBuilder setMeterProvider(MeterProvider meterProv * @return a new exporter's instance */ public OtlpHttpLogRecordExporter build() { - return new OtlpHttpLogRecordExporter(delegate, delegate.build()); + return new OtlpHttpLogRecordExporter(delegate, delegate.build(), memoryMode); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporter.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporter.java index 922baaaec5b..87d7b4c7a10 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporter.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporter.java @@ -7,8 +7,10 @@ import io.opentelemetry.exporter.internal.http.HttpExporter; import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.metrics.MetricsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.otlp.metrics.MetricReusableDataMarshaler; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; @@ -17,6 +19,7 @@ import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.util.Collection; +import java.util.StringJoiner; import javax.annotation.concurrent.ThreadSafe; /** @@ -27,20 +30,23 @@ @ThreadSafe public final class OtlpHttpMetricExporter implements MetricExporter { - private final HttpExporterBuilder builder; - private final HttpExporter delegate; + private final HttpExporterBuilder builder; + private final HttpExporter delegate; private final AggregationTemporalitySelector aggregationTemporalitySelector; private final DefaultAggregationSelector defaultAggregationSelector; + private final MetricReusableDataMarshaler marshaler; OtlpHttpMetricExporter( - HttpExporterBuilder builder, - HttpExporter delegate, + HttpExporterBuilder builder, + HttpExporter delegate, AggregationTemporalitySelector aggregationTemporalitySelector, - DefaultAggregationSelector defaultAggregationSelector) { + DefaultAggregationSelector defaultAggregationSelector, + MemoryMode memoryMode) { this.builder = builder; this.delegate = delegate; this.aggregationTemporalitySelector = aggregationTemporalitySelector; this.defaultAggregationSelector = defaultAggregationSelector; + this.marshaler = new MetricReusableDataMarshaler(memoryMode, delegate::export); } /** @@ -72,7 +78,7 @@ public static OtlpHttpMetricExporterBuilder builder() { * @since 1.29.0 */ public OtlpHttpMetricExporterBuilder toBuilder() { - return new OtlpHttpMetricExporterBuilder(builder.copy()); + return new OtlpHttpMetricExporterBuilder(builder.copy(), marshaler.getMemoryMode()); } 
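For readers following the memory-mode change: a minimal usage sketch of the REUSABLE_DATA path this change wires through the OTLP HTTP metric exporter (builder methods assumed from this diff and the public OTLP exporter API; the endpoint value is only an example, not part of this change):

  import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter;
  import io.opentelemetry.sdk.common.export.MemoryMode;

  class MemoryModeSketch {
    static OtlpHttpMetricExporter reusableDataExporter() {
      // REUSABLE_DATA reuses marshaling objects across export() calls to reduce allocation;
      // IMMUTABLE_DATA keeps the previous allocate-per-export behavior.
      return OtlpHttpMetricExporter.builder()
          .setEndpoint("http://localhost:4318/v1/metrics")
          .setMemoryMode(MemoryMode.REUSABLE_DATA)
          .build();
    }
  }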
@Override @@ -85,6 +91,11 @@ public Aggregation getDefaultAggregation(InstrumentType instrumentType) { return defaultAggregationSelector.getDefaultAggregation(instrumentType); } + @Override + public MemoryMode getMemoryMode() { + return marshaler.getMemoryMode(); + } + /** * Submits all the given metrics in a single batch to the OpenTelemetry collector. * @@ -93,8 +104,7 @@ public Aggregation getDefaultAggregation(InstrumentType instrumentType) { */ @Override public CompletableResultCode export(Collection metrics) { - MetricsRequestMarshaler exportRequest = MetricsRequestMarshaler.create(metrics); - return delegate.export(exportRequest, metrics.size()); + return marshaler.export(metrics); } /** @@ -115,6 +125,15 @@ public CompletableResultCode shutdown() { @Override public String toString() { - return "OtlpHttpMetricExporter{" + builder.toString(false) + "}"; + StringJoiner joiner = new StringJoiner(", ", "OtlpHttpMetricExporter{", "}"); + joiner.add(builder.toString(false)); + joiner.add( + "aggregationTemporalitySelector=" + + AggregationTemporalitySelector.asString(aggregationTemporalitySelector)); + joiner.add( + "defaultAggregationSelector=" + + DefaultAggregationSelector.asString(defaultAggregationSelector)); + joiner.add("memoryMode=" + marshaler.getMemoryMode()); + return joiner.toString(); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterBuilder.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterBuilder.java index c9466d51819..dc5d5c667b4 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterBuilder.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterBuilder.java @@ -9,16 +9,27 @@ import static java.util.Objects.requireNonNull; import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.exporter.internal.compression.Compressor; +import io.opentelemetry.exporter.internal.compression.CompressorProvider; +import io.opentelemetry.exporter.internal.compression.CompressorUtil; import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.metrics.MetricsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.exporter.otlp.internal.OtlpUserAgent; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.time.Duration; +import java.util.Collection; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -33,22 +44,25 @@ public final class OtlpHttpMetricExporterBuilder { private static final AggregationTemporalitySelector DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR = AggregationTemporalitySelector.alwaysCumulative(); + private static final MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.REUSABLE_DATA; - private final 
HttpExporterBuilder delegate; + private final HttpExporterBuilder delegate; private AggregationTemporalitySelector aggregationTemporalitySelector = DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR; private DefaultAggregationSelector defaultAggregationSelector = DefaultAggregationSelector.getDefault(); + private MemoryMode memoryMode; - OtlpHttpMetricExporterBuilder(HttpExporterBuilder delegate) { + OtlpHttpMetricExporterBuilder(HttpExporterBuilder delegate, MemoryMode memoryMode) { this.delegate = delegate; - delegate.setMeterProvider(MeterProvider.noop()); - OtlpUserAgent.addUserAgentHeader(delegate::addHeader); + this.memoryMode = memoryMode; + delegate.setMeterProvider(MeterProvider::noop); + OtlpUserAgent.addUserAgentHeader(delegate::addConstantHeaders); } OtlpHttpMetricExporterBuilder() { - this(new HttpExporterBuilder<>("otlp", "metric", DEFAULT_ENDPOINT)); + this(new HttpExporterBuilder<>("otlp", "metric", DEFAULT_ENDPOINT), DEFAULT_MEMORY_MODE); } /** @@ -71,6 +85,30 @@ public OtlpHttpMetricExporterBuilder setTimeout(Duration timeout) { return setTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); } + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value HttpExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.33.0 + */ + public OtlpHttpMetricExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + requireNonNull(unit, "unit"); + checkArgument(timeout >= 0, "timeout must be non-negative"); + delegate.setConnectTimeout(timeout, unit); + return this; + } + + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value HttpExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.33.0 + */ + public OtlpHttpMetricExporterBuilder setConnectTimeout(Duration timeout) { + requireNonNull(timeout, "timeout"); + return setConnectTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); + } + /** * Sets the OTLP endpoint to connect to. If unset, defaults to {@value DEFAULT_ENDPOINT}. The * endpoint must start with either http:// or https://, and include the full HTTP path. @@ -82,21 +120,34 @@ public OtlpHttpMetricExporterBuilder setEndpoint(String endpoint) { } /** - * Sets the method used to compress payloads. If unset, compression is disabled. Currently - * supported compression methods include "gzip" and "none". + * Sets the method used to compress payloads. If unset, compression is disabled. Compression + * method "gzip" and "none" are supported out of the box. Support for additional compression + * methods is available by implementing {@link Compressor} and {@link CompressorProvider}. */ public OtlpHttpMetricExporterBuilder setCompression(String compressionMethod) { requireNonNull(compressionMethod, "compressionMethod"); - checkArgument( - compressionMethod.equals("gzip") || compressionMethod.equals("none"), - "Unsupported compression method. Supported compression methods include: gzip, none."); - delegate.setCompression(compressionMethod); + Compressor compressor = CompressorUtil.validateAndResolveCompressor(compressionMethod); + delegate.setCompression(compressor); return this; } - /** Add header to requests. */ + /** + * Add a constant header to requests. If the {@code key} collides with another constant header + * name or a one from {@link #setHeaders(Supplier)}, the values from both are included. 
+ */ public OtlpHttpMetricExporterBuilder addHeader(String key, String value) { - delegate.addHeader(key, value); + delegate.addConstantHeaders(key, value); + return this; + } + + /** + * Set the supplier of headers to add to requests. If a key from the map collides with a constant + * from {@link #addHeader(String, String)}, the values from both are included. + * + * @since 1.33.0 + */ + public OtlpHttpMetricExporterBuilder setHeaders(Supplier> headerSupplier) { + delegate.setHeadersSupplier(headerSupplier); return this; } @@ -163,16 +214,72 @@ public OtlpHttpMetricExporterBuilder setDefaultAggregationSelector( } /** - * Ses the retry policy. Retry is disabled by default. + * Set the retry policy, or {@code null} to disable retry. Retry policy is {@link + * RetryPolicy#getDefault()} by default * * @since 1.28.0 */ - public OtlpHttpMetricExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { - requireNonNull(retryPolicy, "retryPolicy"); + public OtlpHttpMetricExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { delegate.setRetryPolicy(retryPolicy); return this; } + /** + * Sets the proxy options. Proxying is disabled by default. + * + * @since 1.36.0 + */ + public OtlpHttpMetricExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + requireNonNull(proxyOptions, "proxyOptions"); + delegate.setProxyOptions(proxyOptions); + return this; + } + + /** + * Set the {@link MemoryMode}. If unset, defaults to {@link #DEFAULT_MEMORY_MODE}. + * + *

When memory mode is {@link MemoryMode#REUSABLE_DATA}, serialization is optimized to reduce + * memory allocation. Additionally, the value is used for {@link MetricExporter#getMemoryMode()}, + * which sends a signal to the metrics SDK to reuse memory when possible. This is safe and + * desirable for most use cases, but should be used with caution when wrapping and delegating to the + * exporter. It is not safe for the wrapping exporter to hold onto references to {@link + * MetricData} batches since the same data structures will be reused in subsequent calls to {@link + * MetricExporter#export(Collection)}. + * + * @since 1.39.0 + */ + public OtlpHttpMetricExporterBuilder setMemoryMode(MemoryMode memoryMode) { + requireNonNull(memoryMode, "memoryMode"); + this.memoryMode = memoryMode; + return this; + } + + /** + * Set the {@link ClassLoader} used to load the sender API. + * + * @since 1.48.0 + */ + public OtlpHttpMetricExporterBuilder setServiceClassLoader(ClassLoader serviceClassLoader) { + requireNonNull(serviceClassLoader, "serviceClassLoader"); + delegate.setServiceClassLoader(serviceClassLoader); + return this; + } + + /** + * Set the {@link ExecutorService} used to execute requests. + * + *
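To make the wrapping caveat above concrete, here is a hypothetical delegating MetricExporter (the class name is invented) that stays safe under REUSABLE_DATA: it forwards each batch synchronously and never retains the MetricData instances.

```java
import io.opentelemetry.sdk.common.CompletableResultCode;
import io.opentelemetry.sdk.common.export.MemoryMode;
import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.export.MetricExporter;
import java.util.Collection;

class ForwardingMetricExporter implements MetricExporter {
  private final MetricExporter delegate;

  ForwardingMetricExporter(MetricExporter delegate) {
    this.delegate = delegate;
  }

  @Override
  public CompletableResultCode export(Collection<MetricData> metrics) {
    // Safe: the batch is only read inline. Storing `metrics` for later use would be
    // unsafe because the same data structures are reused on subsequent export calls.
    return delegate.export(metrics);
  }

  @Override
  public CompletableResultCode flush() {
    return delegate.flush();
  }

  @Override
  public CompletableResultCode shutdown() {
    return delegate.shutdown();
  }

  @Override
  public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) {
    return delegate.getAggregationTemporality(instrumentType);
  }

  @Override
  public MemoryMode getMemoryMode() {
    // Propagate the delegate's memory mode so the SDK sees the right signal.
    return delegate.getMemoryMode();
  }
}
```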

NOTE: By calling this method, you are opting into managing the lifecycle of the {@code + * executorService}. {@link ExecutorService#shutdown()} will NOT be called when this exporter is + * shutdown. + * + * @since 1.49.0 + */ + public OtlpHttpMetricExporterBuilder setExecutorService(ExecutorService executorService) { + requireNonNull(executorService, "executorService"); + delegate.setExecutorService(executorService); + return this; + } + OtlpHttpMetricExporterBuilder exportAsJson() { delegate.exportAsJson(); return this; @@ -185,6 +292,10 @@ OtlpHttpMetricExporterBuilder exportAsJson() { */ public OtlpHttpMetricExporter build() { return new OtlpHttpMetricExporter( - delegate, delegate.build(), aggregationTemporalitySelector, defaultAggregationSelector); + delegate, + delegate.build(), + aggregationTemporalitySelector, + defaultAggregationSelector, + memoryMode); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporter.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporter.java index d693090e316..71870e12b54 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporter.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporter.java @@ -7,11 +7,14 @@ import io.opentelemetry.exporter.internal.http.HttpExporter; import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.traces.TraceRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.otlp.traces.SpanReusableDataMarshaler; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.export.SpanExporter; import java.util.Collection; +import java.util.StringJoiner; import javax.annotation.concurrent.ThreadSafe; /** @@ -22,14 +25,17 @@ @ThreadSafe public final class OtlpHttpSpanExporter implements SpanExporter { - private final HttpExporterBuilder builder; - private final HttpExporter delegate; + private final HttpExporterBuilder builder; + private final HttpExporter delegate; + private final SpanReusableDataMarshaler marshaler; OtlpHttpSpanExporter( - HttpExporterBuilder builder, - HttpExporter delegate) { + HttpExporterBuilder builder, + HttpExporter delegate, + MemoryMode memoryMode) { this.builder = builder; this.delegate = delegate; + this.marshaler = new SpanReusableDataMarshaler(memoryMode, delegate::export); } /** @@ -61,7 +67,7 @@ public static OtlpHttpSpanExporterBuilder builder() { * @since 1.29.0 */ public OtlpHttpSpanExporterBuilder toBuilder() { - return new OtlpHttpSpanExporterBuilder(builder.copy()); + return new OtlpHttpSpanExporterBuilder(builder.copy(), marshaler.getMemoryMode()); } /** @@ -72,8 +78,7 @@ public OtlpHttpSpanExporterBuilder toBuilder() { */ @Override public CompletableResultCode export(Collection spans) { - TraceRequestMarshaler exportRequest = TraceRequestMarshaler.create(spans); - return delegate.export(exportRequest, spans.size()); + return marshaler.export(spans); } /** @@ -94,6 +99,9 @@ public CompletableResultCode shutdown() { @Override public String toString() { - return "OtlpHttpSpanExporter{" + builder.toString(false) + "}"; + StringJoiner joiner = new StringJoiner(", ", "OtlpHttpSpanExporter{", "}"); + joiner.add(builder.toString(false)); + 
joiner.add("memoryMode=" + marshaler.getMemoryMode()); + return joiner.toString(); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterBuilder.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterBuilder.java index 3cf5b85b1cf..11bd7192db9 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterBuilder.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterBuilder.java @@ -10,12 +10,21 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.exporter.internal.compression.Compressor; +import io.opentelemetry.exporter.internal.compression.CompressorProvider; +import io.opentelemetry.exporter.internal.compression.CompressorUtil; import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.traces.TraceRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.exporter.otlp.internal.OtlpUserAgent; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -27,16 +36,19 @@ public final class OtlpHttpSpanExporterBuilder { private static final String DEFAULT_ENDPOINT = "http://localhost:4318/v1/traces"; + private static final MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.REUSABLE_DATA; - private final HttpExporterBuilder delegate; + private final HttpExporterBuilder delegate; + private MemoryMode memoryMode; - OtlpHttpSpanExporterBuilder(HttpExporterBuilder delegate) { + OtlpHttpSpanExporterBuilder(HttpExporterBuilder delegate, MemoryMode memoryMode) { this.delegate = delegate; - OtlpUserAgent.addUserAgentHeader(delegate::addHeader); + this.memoryMode = memoryMode; + OtlpUserAgent.addUserAgentHeader(delegate::addConstantHeaders); } OtlpHttpSpanExporterBuilder() { - this(new HttpExporterBuilder<>("otlp", "span", DEFAULT_ENDPOINT)); + this(new HttpExporterBuilder<>("otlp", "span", DEFAULT_ENDPOINT), DEFAULT_MEMORY_MODE); } /** @@ -59,6 +71,30 @@ public OtlpHttpSpanExporterBuilder setTimeout(Duration timeout) { return setTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); } + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value HttpExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.33.0 + */ + public OtlpHttpSpanExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + requireNonNull(unit, "unit"); + checkArgument(timeout >= 0, "timeout must be non-negative"); + delegate.setConnectTimeout(timeout, unit); + return this; + } + + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value HttpExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.33.0 + */ + public OtlpHttpSpanExporterBuilder setConnectTimeout(Duration timeout) { + requireNonNull(timeout, "timeout"); + return setConnectTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); + } + /** * Sets the OTLP endpoint to connect to. 
If unset, defaults to {@value DEFAULT_ENDPOINT}. The * endpoint must start with either http:// or https://, and include the full HTTP path. @@ -70,21 +106,34 @@ public OtlpHttpSpanExporterBuilder setEndpoint(String endpoint) { } /** - * Sets the method used to compress payloads. If unset, compression is disabled. Currently - * supported compression methods include "gzip" and "none". + * Sets the method used to compress payloads. If unset, compression is disabled. Compression + * method "gzip" and "none" are supported out of the box. Support for additional compression + * methods is available by implementing {@link Compressor} and {@link CompressorProvider}. */ public OtlpHttpSpanExporterBuilder setCompression(String compressionMethod) { requireNonNull(compressionMethod, "compressionMethod"); - checkArgument( - compressionMethod.equals("gzip") || compressionMethod.equals("none"), - "Unsupported compression method. Supported compression methods include: gzip, none."); - delegate.setCompression(compressionMethod); + Compressor compressor = CompressorUtil.validateAndResolveCompressor(compressionMethod); + delegate.setCompression(compressor); return this; } - /** Add header to requests. */ + /** + * Add a constant header to requests. If the {@code key} collides with another constant header + * name or a one from {@link #setHeaders(Supplier)}, the values from both are included. + */ public OtlpHttpSpanExporterBuilder addHeader(String key, String value) { - delegate.addHeader(key, value); + delegate.addConstantHeaders(key, value); + return this; + } + + /** + * Set the supplier of headers to add to requests. If a key from the map collides with a constant + * from {@link #addHeader(String, String)}, the values from both are included. + * + * @since 1.33.0 + */ + public OtlpHttpSpanExporterBuilder setHeaders(Supplier> headerSupplier) { + delegate.setHeadersSupplier(headerSupplier); return this; } @@ -120,23 +169,87 @@ public OtlpHttpSpanExporterBuilder setSslContext( } /** - * Ses the retry policy. Retry is disabled by default. + * Set the retry policy, or {@code null} to disable retry. Retry policy is {@link + * RetryPolicy#getDefault()} by default * * @since 1.28.0 */ - public OtlpHttpSpanExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { - requireNonNull(retryPolicy, "retryPolicy"); + public OtlpHttpSpanExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { delegate.setRetryPolicy(retryPolicy); return this; } + /** + * Sets the proxy options. Proxying is disabled by default. + * + * @since 1.36.0 + */ + public OtlpHttpSpanExporterBuilder setProxy(ProxyOptions proxyOptions) { + requireNonNull(proxyOptions, "proxyOptions"); + delegate.setProxyOptions(proxyOptions); + return this; + } + /** * Sets the {@link MeterProvider} to use to collect metrics related to export. If not set, uses * {@link GlobalOpenTelemetry#getMeterProvider()}. */ public OtlpHttpSpanExporterBuilder setMeterProvider(MeterProvider meterProvider) { requireNonNull(meterProvider, "meterProvider"); - delegate.setMeterProvider(meterProvider); + setMeterProvider(() -> meterProvider); + return this; + } + + /** + * Sets the {@link MeterProvider} supplier to use to collect metrics related to export. If not + * set, uses {@link GlobalOpenTelemetry#getMeterProvider()}. 
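A usage sketch of the header-supplier and compression options introduced above; the endpoint value and the token supplier are placeholders, not part of this change.

```java
import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter;
import java.util.Collections;
import java.util.Map;
import java.util.function.Supplier;

class SpanExporterSetup {
  static OtlpHttpSpanExporter create(Supplier<String> readToken) {
    // The supplier is consulted when requests are sent, so rotated credentials are picked up.
    Supplier<Map<String, String>> headers =
        () -> Collections.singletonMap("Authorization", "Bearer " + readToken.get());
    return OtlpHttpSpanExporter.builder()
        .setEndpoint("http://collector.example.com:4318/v1/traces")
        .setCompression("gzip")
        .setHeaders(headers)
        .build();
  }
}
```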
+ * + * @since 1.32.0 + */ + public OtlpHttpSpanExporterBuilder setMeterProvider( + Supplier meterProviderSupplier) { + requireNonNull(meterProviderSupplier, "meterProviderSupplier"); + delegate.setMeterProvider(meterProviderSupplier); + return this; + } + + /** + * Set the {@link MemoryMode}. If unset, defaults to {@link #DEFAULT_MEMORY_MODE}. + * + *

When memory mode is {@link MemoryMode#REUSABLE_DATA}, serialization is optimized to reduce + * memory allocation. + * + * @since 1.39.0 + */ + public OtlpHttpSpanExporterBuilder setMemoryMode(MemoryMode memoryMode) { + requireNonNull(memoryMode, "memoryMode"); + this.memoryMode = memoryMode; + return this; + } + + /** + * Set the {@link ClassLoader} used to load the sender API. + * + * @since 1.48.0 + */ + public OtlpHttpSpanExporterBuilder setServiceClassLoader(ClassLoader serviceClassLoader) { + requireNonNull(serviceClassLoader, "serviceClassLoader"); + delegate.setServiceClassLoader(serviceClassLoader); + return this; + } + + /** + * Set the {@link ExecutorService} used to execute requests. + * + *

NOTE: By calling this method, you are opting into managing the lifecycle of the {@code + * executorService}. {@link ExecutorService#shutdown()} will NOT be called when this exporter is + * shutdown. + * + * @since 1.49.0 + */ + public OtlpHttpSpanExporterBuilder setExecutorService(ExecutorService executorService) { + requireNonNull(executorService, "executorService"); + delegate.setExecutorService(executorService); return this; } @@ -146,6 +259,6 @@ public OtlpHttpSpanExporterBuilder setMeterProvider(MeterProvider meterProvider) * @return a new exporter's instance */ public OtlpHttpSpanExporter build() { - return new OtlpHttpSpanExporter(delegate, delegate.build()); + return new OtlpHttpSpanExporter(delegate, delegate.build(), memoryMode); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpConfigUtil.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpConfigUtil.java index 1602cdd246b..8bfcb4b3960 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpConfigUtil.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpConfigUtil.java @@ -5,27 +5,23 @@ package io.opentelemetry.exporter.otlp.internal; -import static io.opentelemetry.sdk.metrics.Aggregation.explicitBucketHistogram; - +import io.opentelemetry.exporter.internal.ExporterBuilderUtil; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.common.export.RetryPolicy; -import io.opentelemetry.sdk.metrics.Aggregation; -import io.opentelemetry.sdk.metrics.InstrumentType; -import io.opentelemetry.sdk.metrics.data.AggregationTemporality; -import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; -import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; -import io.opentelemetry.sdk.metrics.internal.aggregator.AggregationUtil; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.net.MalformedURLException; import java.net.URL; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.time.Duration; -import java.util.Locale; import java.util.Map; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.logging.Logger; import javax.annotation.Nullable; /** @@ -34,6 +30,8 @@ */ public final class OtlpConfigUtil { + private static final Logger logger = Logger.getLogger(OtlpConfigUtil.class.getName()); + public static final String DATA_TYPE_TRACES = "traces"; public static final String DATA_TYPE_METRICS = "metrics"; public static final String DATA_TYPE_LOGS = "logs"; @@ -51,6 +49,7 @@ public static String getOtlpProtocol(String dataType, ConfigProperties config) { } /** Invoke the setters with the OTLP configuration for the {@code dataType}. 
*/ + @SuppressWarnings("TooManyParameters") public static void configureOtlpExporterBuilder( String dataType, ConfigProperties config, @@ -60,7 +59,8 @@ public static void configureOtlpExporterBuilder( Consumer setTimeout, Consumer setTrustedCertificates, BiConsumer setClientTls, - Consumer setRetryPolicy) { + Consumer setRetryPolicy, + Consumer setMemoryMode) { String protocol = getOtlpProtocol(dataType, config); boolean isHttpProtobuf = protocol.equals(PROTOCOL_HTTP_PROTOBUF); URL endpoint = @@ -85,11 +85,7 @@ public static void configureOtlpExporterBuilder( setEndpoint.accept(endpoint.toString()); } - Map headers = config.getMap("otel.exporter.otlp." + dataType + ".headers"); - if (headers.isEmpty()) { - headers = config.getMap("otel.exporter.otlp.headers"); - } - headers.forEach(addHeader); + configureOtlpHeaders(config, dataType, addHeader); String compression = config.getString("otel.exporter.otlp." + dataType + ".compression"); if (compression == null) { @@ -118,9 +114,11 @@ public static void configureOtlpExporterBuilder( determinePropertyByType(config, "otel.exporter.otlp", dataType, "client.certificate")); if (clientKeyPath != null && clientKeyChainPath == null) { - throw new ConfigurationException("Client key provided but certification chain is missing"); + throw new ConfigurationException( + "client key provided without client certificate - both client key and client certificate must be set"); } else if (clientKeyPath == null && clientKeyChainPath != null) { - throw new ConfigurationException("Client key chain provided but key is missing"); + throw new ConfigurationException( + "client certificate provided without client key - both client key and client_certificate must be set"); } byte[] certificateBytes = readFileBytes(certificatePath); @@ -135,62 +133,30 @@ public static void configureOtlpExporterBuilder( setClientTls.accept(clientKeyBytes, clientKeyChainBytes); } - boolean retryEnabled = - config.getBoolean("otel.experimental.exporter.otlp.retry.enabled", false); - if (retryEnabled) { - setRetryPolicy.accept(RetryPolicy.getDefault()); + Boolean retryDisabled = config.getBoolean("otel.java.exporter.otlp.retry.disabled"); + if (retryDisabled != null && retryDisabled) { + setRetryPolicy.accept(null); } - } - /** - * Invoke the {@code aggregationTemporalitySelectorConsumer} with the configured {@link - * AggregationTemporality}. - */ - public static void configureOtlpAggregationTemporality( - ConfigProperties config, - Consumer aggregationTemporalitySelectorConsumer) { - String temporalityStr = config.getString("otel.exporter.otlp.metrics.temporality.preference"); - if (temporalityStr == null) { - return; - } - AggregationTemporalitySelector temporalitySelector; - switch (temporalityStr.toLowerCase(Locale.ROOT)) { - case "cumulative": - temporalitySelector = AggregationTemporalitySelector.alwaysCumulative(); - break; - case "delta": - temporalitySelector = AggregationTemporalitySelector.deltaPreferred(); - break; - case "lowmemory": - temporalitySelector = AggregationTemporalitySelector.lowMemory(); - break; - default: - throw new ConfigurationException("Unrecognized aggregation temporality: " + temporalityStr); - } - aggregationTemporalitySelectorConsumer.accept(temporalitySelector); + ExporterBuilderUtil.configureExporterMemoryMode(config, setMemoryMode); } - /** - * Invoke the {@code defaultAggregationSelectorConsumer} with the configured {@link - * DefaultAggregationSelector}. 
- */ - public static void configureOtlpHistogramDefaultAggregation( - ConfigProperties config, - Consumer defaultAggregationSelectorConsumer) { - String defaultHistogramAggregation = - config.getString("otel.exporter.otlp.metrics.default.histogram.aggregation"); - if (defaultHistogramAggregation == null) { - return; + static void configureOtlpHeaders( + ConfigProperties config, String dataType, BiConsumer addHeader) { + Map headers = config.getMap("otel.exporter.otlp." + dataType + ".headers"); + if (headers.isEmpty()) { + headers = config.getMap("otel.exporter.otlp.headers"); } - if (AggregationUtil.aggregationName(Aggregation.base2ExponentialBucketHistogram()) - .equalsIgnoreCase(defaultHistogramAggregation)) { - defaultAggregationSelectorConsumer.accept( - DefaultAggregationSelector.getDefault() - .with(InstrumentType.HISTOGRAM, Aggregation.base2ExponentialBucketHistogram())); - } else if (!AggregationUtil.aggregationName(explicitBucketHistogram()) - .equalsIgnoreCase(defaultHistogramAggregation)) { - throw new ConfigurationException( - "Unrecognized default histogram aggregation: " + defaultHistogramAggregation); + for (Map.Entry entry : headers.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + try { + // headers are encoded as URL - see + // https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#specifying-headers-via-environment-variables + addHeader.accept(key, URLDecoder.decode(value, StandardCharsets.UTF_8.displayName())); + } catch (Exception e) { + throw new ConfigurationException("Cannot decode header value: " + value, e); + } } } @@ -203,7 +169,7 @@ private static URL createUrl(URL context, String spec) { } @Nullable - private static URL validateEndpoint(@Nullable String endpoint, boolean allowPath) { + static URL validateEndpoint(@Nullable String endpoint, boolean isHttpProtobuf) { if (endpoint == null) { return null; } @@ -225,15 +191,33 @@ private static URL validateEndpoint(@Nullable String endpoint, boolean allowPath throw new ConfigurationException( "OTLP endpoint must not have a fragment: " + endpointUrl.getRef()); } - if (!allowPath && (!endpointUrl.getPath().isEmpty() && !endpointUrl.getPath().equals("/"))) { + if (!isHttpProtobuf + && (!endpointUrl.getPath().isEmpty() && !endpointUrl.getPath().equals("/"))) { throw new ConfigurationException( "OTLP endpoint must not have a path: " + endpointUrl.getPath()); } + if ((endpointUrl.getPort() == 4317 && isHttpProtobuf) + || (endpointUrl.getPort() == 4318 && !isHttpProtobuf)) { + int expectedPort = isHttpProtobuf ? 4318 : 4317; + String protocol = isHttpProtobuf ? PROTOCOL_HTTP_PROTOBUF : PROTOCOL_GRPC; + logger.warning( + "OTLP exporter endpoint port is likely incorrect for protocol version \"" + + protocol + + "\". The endpoint " + + endpointUrl + + " has port " + + endpointUrl.getPort() + + ". 
Typically, the \"" + + protocol + + "\" version of OTLP uses port " + + expectedPort + + "."); + } return endpointUrl; } @Nullable - private static byte[] readFileBytes(@Nullable String filePath) { + static byte[] readFileBytes(@Nullable String filePath) { if (filePath == null) { return null; } @@ -241,8 +225,7 @@ private static byte[] readFileBytes(@Nullable String filePath) { if (!file.exists()) { throw new ConfigurationException("Invalid OTLP certificate/key path: " + filePath); } - try { - RandomAccessFile raf = new RandomAccessFile(file, "r"); + try (RandomAccessFile raf = new RandomAccessFile(file, "r")) { byte[] bytes = new byte[(int) raf.length()]; raf.readFully(bytes); return bytes; diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpDeclarativeConfigUtil.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpDeclarativeConfigUtil.java new file mode 100644 index 00000000000..55ed905364e --- /dev/null +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpDeclarativeConfigUtil.java @@ -0,0 +1,120 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal; + +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_HTTP_PROTOBUF; +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.configureOtlpHeaders; +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.readFileBytes; +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.validateEndpoint; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.internal.IncubatingExporterBuilderUtil; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.common.export.RetryPolicy; +import java.net.URL; +import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. + */ +public final class OtlpDeclarativeConfigUtil { + + /** Determine the configured OTLP protocol for the {@code dataType}. */ + public static String getStructuredConfigOtlpProtocol(DeclarativeConfigProperties config) { + // NOTE: The default OTLP protocol is different for declarative config than for env var / system + // property based config. This is intentional. OpenTelemetry changed the default protocol + // recommendation from grpc to http/protobuf, but the autoconfigure's env var / system property + // based config did not update to reflect this before stabilizing, and changing is a breaking + // change requiring a major version bump. Declarative config is not yet stable and therefore can + // switch to the current default recommendation, which aligns also aligns with the behavior of + // the OpenTelemetry Java Agent 2.x+. + return config.getString("protocol", PROTOCOL_HTTP_PROTOBUF); + } + + /** Invoke the setters with the OTLP configuration for the {@code dataType}. 
*/ + @SuppressWarnings("TooManyParameters") + public static void configureOtlpExporterBuilder( + String dataType, + DeclarativeConfigProperties config, + Consumer setEndpoint, + BiConsumer addHeader, + Consumer setCompression, + Consumer setTimeout, + Consumer setTrustedCertificates, + BiConsumer setClientTls, + Consumer setRetryPolicy, + Consumer setMemoryMode) { + String protocol = getStructuredConfigOtlpProtocol(config); + boolean isHttpProtobuf = protocol.equals(PROTOCOL_HTTP_PROTOBUF); + URL endpoint = validateEndpoint(config.getString("endpoint"), isHttpProtobuf); + if (endpoint != null) { + setEndpoint.accept(endpoint.toString()); + } + + String headerList = config.getString("headers_list"); + if (headerList != null) { + ConfigProperties headersListConfig = + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.exporter.otlp.headers", headerList)); + configureOtlpHeaders(headersListConfig, dataType, addHeader); + } + + List headers = config.getStructuredList("headers"); + if (headers != null) { + headers.forEach( + header -> { + String name = header.getString("name"); + String value = header.getString("value"); + if (name != null && value != null) { + addHeader.accept(name, value); + } + }); + } + + String compression = config.getString("compression"); + if (compression != null) { + setCompression.accept(compression); + } + + Integer timeoutMs = config.getInt("timeout"); + if (timeoutMs != null) { + setTimeout.accept(Duration.ofMillis(timeoutMs)); + } + + String certificatePath = config.getString("certificate"); + String clientKeyPath = config.getString("client_key"); + String clientKeyChainPath = config.getString("client_certificate"); + + if (clientKeyPath != null && clientKeyChainPath == null) { + throw new ConfigurationException( + "client_key provided without client_certificate - both client_key and client_certificate must be set"); + } else if (clientKeyPath == null && clientKeyChainPath != null) { + throw new ConfigurationException( + "client_certificate provided without client_key - both client_key and client_certificate must be set"); + } + byte[] certificateBytes = readFileBytes(certificatePath); + if (certificateBytes != null) { + setTrustedCertificates.accept(certificateBytes); + } + byte[] clientKeyBytes = readFileBytes(clientKeyPath); + byte[] clientKeyChainBytes = readFileBytes(clientKeyChainPath); + if (clientKeyBytes != null && clientKeyChainBytes != null) { + setClientTls.accept(clientKeyBytes, clientKeyChainBytes); + } + + IncubatingExporterBuilderUtil.configureExporterMemoryMode(config, setMemoryMode); + } + + private OtlpDeclarativeConfigUtil() {} +} diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterComponentProvider.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterComponentProvider.java new file mode 100644 index 00000000000..e10fd8bdd34 --- /dev/null +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterComponentProvider.java @@ -0,0 +1,90 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal; + +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.DATA_TYPE_LOGS; +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_GRPC; +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_HTTP_PROTOBUF; + +import 
io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter; +import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder; +import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; +import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; + +/** + * Declarative configuration SPI implementation for {@link OtlpHttpLogRecordExporter} and {@link + * OtlpGrpcLogRecordExporter}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public class OtlpLogRecordExporterComponentProvider + implements ComponentProvider { + + @Override + public Class getType() { + return LogRecordExporter.class; + } + + @Override + public String getName() { + return "otlp"; + } + + @Override + public LogRecordExporter create(DeclarativeConfigProperties config) { + String protocol = OtlpDeclarativeConfigUtil.getStructuredConfigOtlpProtocol(config); + + if (protocol.equals(PROTOCOL_HTTP_PROTOBUF)) { + OtlpHttpLogRecordExporterBuilder builder = httpBuilder(); + + OtlpDeclarativeConfigUtil.configureOtlpExporterBuilder( + DATA_TYPE_LOGS, + config, + builder::setEndpoint, + builder::addHeader, + builder::setCompression, + builder::setTimeout, + builder::setTrustedCertificates, + builder::setClientTls, + builder::setRetryPolicy, + builder::setMemoryMode); + + return builder.build(); + } else if (protocol.equals(PROTOCOL_GRPC)) { + OtlpGrpcLogRecordExporterBuilder builder = grpcBuilder(); + + OtlpDeclarativeConfigUtil.configureOtlpExporterBuilder( + DATA_TYPE_LOGS, + config, + builder::setEndpoint, + builder::addHeader, + builder::setCompression, + builder::setTimeout, + builder::setTrustedCertificates, + builder::setClientTls, + builder::setRetryPolicy, + builder::setMemoryMode); + + return builder.build(); + } + throw new ConfigurationException("Unsupported OTLP metrics protocol: " + protocol); + } + + // Visible for testing + OtlpHttpLogRecordExporterBuilder httpBuilder() { + return OtlpHttpLogRecordExporter.builder(); + } + + // Visible for testing + OtlpGrpcLogRecordExporterBuilder grpcBuilder() { + return OtlpGrpcLogRecordExporter.builder(); + } +} diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterProvider.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterProvider.java index ebc2c25eb44..67ed44c754a 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterProvider.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterProvider.java @@ -9,14 +9,18 @@ import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_GRPC; import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_HTTP_PROTOBUF; +import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter; import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder; +import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener; import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider; import io.opentelemetry.sdk.logs.export.LogRecordExporter; +import java.util.concurrent.atomic.AtomicReference; /** * {@link LogRecordExporter} SPI implementation for {@link OtlpGrpcLogRecordExporter} and {@link @@ -25,7 +29,12 @@ *

This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. */ -public class OtlpLogRecordExporterProvider implements ConfigurableLogRecordExporterProvider { +public class OtlpLogRecordExporterProvider + implements ConfigurableLogRecordExporterProvider, AutoConfigureListener { + + private final AtomicReference meterProviderRef = + new AtomicReference<>(MeterProvider.noop()); + @Override public LogRecordExporter createExporter(ConfigProperties config) { String protocol = OtlpConfigUtil.getOtlpProtocol(DATA_TYPE_LOGS, config); @@ -42,7 +51,9 @@ public LogRecordExporter createExporter(ConfigProperties config) { builder::setTimeout, builder::setTrustedCertificates, builder::setClientTls, - builder::setRetryPolicy); + builder::setRetryPolicy, + builder::setMemoryMode); + builder.setMeterProvider(meterProviderRef::get); return builder.build(); } else if (protocol.equals(PROTOCOL_GRPC)) { @@ -57,7 +68,9 @@ public LogRecordExporter createExporter(ConfigProperties config) { builder::setTimeout, builder::setTrustedCertificates, builder::setClientTls, - builder::setRetryPolicy); + builder::setRetryPolicy, + builder::setMemoryMode); + builder.setMeterProvider(meterProviderRef::get); return builder.build(); } @@ -78,4 +91,9 @@ OtlpHttpLogRecordExporterBuilder httpBuilder() { OtlpGrpcLogRecordExporterBuilder grpcBuilder() { return OtlpGrpcLogRecordExporter.builder(); } + + @Override + public void afterAutoConfigure(OpenTelemetrySdk sdk) { + meterProviderRef.set(sdk.getMeterProvider()); + } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterComponentProvider.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterComponentProvider.java new file mode 100644 index 00000000000..c3b17710708 --- /dev/null +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterComponentProvider.java @@ -0,0 +1,98 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal; + +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.DATA_TYPE_METRICS; +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_GRPC; +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_HTTP_PROTOBUF; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.internal.IncubatingExporterBuilderUtil; +import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter; +import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder; +import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; +import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.metrics.export.MetricExporter; + +/** + * Declarative configuration SPI implementation for {@link OtlpHttpMetricExporter} and {@link + * OtlpGrpcMetricExporter}. + * + *
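The AutoConfigureListener wiring above is also usable by third-party SPI providers; a hypothetical provider (names invented) following the same pattern might look like this, swapping in the SDK's MeterProvider once autoconfiguration completes.

```java
import io.opentelemetry.api.metrics.MeterProvider;
import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener;
import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider;
import io.opentelemetry.sdk.logs.export.LogRecordExporter;
import java.util.concurrent.atomic.AtomicReference;

public class MyOtlpLogExporterProvider
    implements ConfigurableLogRecordExporterProvider, AutoConfigureListener {

  // Starts as a no-op and is replaced with the SDK's MeterProvider after autoconfiguration.
  private final AtomicReference<MeterProvider> meterProviderRef =
      new AtomicReference<>(MeterProvider.noop());

  @Override
  public LogRecordExporter createExporter(ConfigProperties config) {
    return OtlpHttpLogRecordExporter.builder()
        .setMeterProvider(meterProviderRef::get)
        .build();
  }

  @Override
  public String getName() {
    return "my-otlp";
  }

  @Override
  public void afterAutoConfigure(OpenTelemetrySdk sdk) {
    meterProviderRef.set(sdk.getMeterProvider());
  }
}
```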

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public class OtlpMetricExporterComponentProvider implements ComponentProvider { + + @Override + public Class getType() { + return MetricExporter.class; + } + + @Override + public String getName() { + return "otlp"; + } + + @Override + public MetricExporter create(DeclarativeConfigProperties config) { + String protocol = OtlpDeclarativeConfigUtil.getStructuredConfigOtlpProtocol(config); + + if (protocol.equals(PROTOCOL_HTTP_PROTOBUF)) { + OtlpHttpMetricExporterBuilder builder = httpBuilder(); + + OtlpDeclarativeConfigUtil.configureOtlpExporterBuilder( + DATA_TYPE_METRICS, + config, + builder::setEndpoint, + builder::addHeader, + builder::setCompression, + builder::setTimeout, + builder::setTrustedCertificates, + builder::setClientTls, + builder::setRetryPolicy, + builder::setMemoryMode); + IncubatingExporterBuilderUtil.configureOtlpAggregationTemporality( + config, builder::setAggregationTemporalitySelector); + IncubatingExporterBuilderUtil.configureOtlpHistogramDefaultAggregation( + config, builder::setDefaultAggregationSelector); + + return builder.build(); + } else if (protocol.equals(PROTOCOL_GRPC)) { + OtlpGrpcMetricExporterBuilder builder = grpcBuilder(); + + OtlpDeclarativeConfigUtil.configureOtlpExporterBuilder( + DATA_TYPE_METRICS, + config, + builder::setEndpoint, + builder::addHeader, + builder::setCompression, + builder::setTimeout, + builder::setTrustedCertificates, + builder::setClientTls, + builder::setRetryPolicy, + builder::setMemoryMode); + IncubatingExporterBuilderUtil.configureOtlpAggregationTemporality( + config, builder::setAggregationTemporalitySelector); + IncubatingExporterBuilderUtil.configureOtlpHistogramDefaultAggregation( + config, builder::setDefaultAggregationSelector); + + return builder.build(); + } + throw new ConfigurationException("Unsupported OTLP metrics protocol: " + protocol); + } + + // Visible for testing + OtlpHttpMetricExporterBuilder httpBuilder() { + return OtlpHttpMetricExporter.builder(); + } + + // Visible for testing + OtlpGrpcMetricExporterBuilder grpcBuilder() { + return OtlpGrpcMetricExporter.builder(); + } +} diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterProvider.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterProvider.java index 48111b8d84a..a60f57a250c 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterProvider.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterProvider.java @@ -9,6 +9,7 @@ import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_GRPC; import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_HTTP_PROTOBUF; +import io.opentelemetry.exporter.internal.ExporterBuilderUtil; import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter; import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder; import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; @@ -26,6 +27,7 @@ * at any time. 
*/ public class OtlpMetricExporterProvider implements ConfigurableMetricExporterProvider { + @Override public MetricExporter createExporter(ConfigProperties config) { String protocol = OtlpConfigUtil.getOtlpProtocol(DATA_TYPE_METRICS, config); @@ -42,10 +44,11 @@ public MetricExporter createExporter(ConfigProperties config) { builder::setTimeout, builder::setTrustedCertificates, builder::setClientTls, - builder::setRetryPolicy); - OtlpConfigUtil.configureOtlpAggregationTemporality( + builder::setRetryPolicy, + builder::setMemoryMode); + ExporterBuilderUtil.configureOtlpAggregationTemporality( config, builder::setAggregationTemporalitySelector); - OtlpConfigUtil.configureOtlpHistogramDefaultAggregation( + ExporterBuilderUtil.configureOtlpHistogramDefaultAggregation( config, builder::setDefaultAggregationSelector); return builder.build(); @@ -61,10 +64,11 @@ public MetricExporter createExporter(ConfigProperties config) { builder::setTimeout, builder::setTrustedCertificates, builder::setClientTls, - builder::setRetryPolicy); - OtlpConfigUtil.configureOtlpAggregationTemporality( + builder::setRetryPolicy, + builder::setMemoryMode); + ExporterBuilderUtil.configureOtlpAggregationTemporality( config, builder::setAggregationTemporalitySelector); - OtlpConfigUtil.configureOtlpHistogramDefaultAggregation( + ExporterBuilderUtil.configureOtlpHistogramDefaultAggregation( config, builder::setDefaultAggregationSelector); return builder.build(); diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterComponentProvider.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterComponentProvider.java new file mode 100644 index 00000000000..befa77c6fd3 --- /dev/null +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterComponentProvider.java @@ -0,0 +1,89 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal; + +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.DATA_TYPE_TRACES; +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_GRPC; +import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_HTTP_PROTOBUF; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter; +import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.trace.export.SpanExporter; + +/** + * Declarative configuration SPI implementation for {@link OtlpHttpSpanExporter} and {@link + * OtlpGrpcSpanExporter}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public class OtlpSpanExporterComponentProvider implements ComponentProvider { + + @Override + public Class getType() { + return SpanExporter.class; + } + + @Override + public String getName() { + return "otlp"; + } + + @Override + public SpanExporter create(DeclarativeConfigProperties config) { + String protocol = OtlpDeclarativeConfigUtil.getStructuredConfigOtlpProtocol(config); + + if (protocol.equals(PROTOCOL_HTTP_PROTOBUF)) { + OtlpHttpSpanExporterBuilder builder = httpBuilder(); + + OtlpDeclarativeConfigUtil.configureOtlpExporterBuilder( + DATA_TYPE_TRACES, + config, + builder::setEndpoint, + builder::addHeader, + builder::setCompression, + builder::setTimeout, + builder::setTrustedCertificates, + builder::setClientTls, + builder::setRetryPolicy, + builder::setMemoryMode); + + return builder.build(); + } else if (protocol.equals(PROTOCOL_GRPC)) { + OtlpGrpcSpanExporterBuilder builder = grpcBuilder(); + + OtlpDeclarativeConfigUtil.configureOtlpExporterBuilder( + DATA_TYPE_TRACES, + config, + builder::setEndpoint, + builder::addHeader, + builder::setCompression, + builder::setTimeout, + builder::setTrustedCertificates, + builder::setClientTls, + builder::setRetryPolicy, + builder::setMemoryMode); + + return builder.build(); + } + throw new ConfigurationException("Unsupported OTLP metrics protocol: " + protocol); + } + + // Visible for testing + OtlpHttpSpanExporterBuilder httpBuilder() { + return OtlpHttpSpanExporter.builder(); + } + + // Visible for testing + OtlpGrpcSpanExporterBuilder grpcBuilder() { + return OtlpGrpcSpanExporter.builder(); + } +} diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterProvider.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterProvider.java index 4c43dd2a82a..e6fad237d85 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterProvider.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterProvider.java @@ -9,14 +9,18 @@ import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_GRPC; import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_HTTP_PROTOBUF; +import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter; import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder; import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder; +import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener; import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider; import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.util.concurrent.atomic.AtomicReference; /** * {@link SpanExporter} SPI implementation for {@link OtlpGrpcSpanExporter} and {@link @@ -25,7 +29,12 @@ *

This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. */ -public class OtlpSpanExporterProvider implements ConfigurableSpanExporterProvider { +public class OtlpSpanExporterProvider + implements ConfigurableSpanExporterProvider, AutoConfigureListener { + + private final AtomicReference meterProviderRef = + new AtomicReference<>(MeterProvider.noop()); + @Override public SpanExporter createExporter(ConfigProperties config) { String protocol = OtlpConfigUtil.getOtlpProtocol(DATA_TYPE_TRACES, config); @@ -41,7 +50,9 @@ public SpanExporter createExporter(ConfigProperties config) { builder::setTimeout, builder::setTrustedCertificates, builder::setClientTls, - builder::setRetryPolicy); + builder::setRetryPolicy, + builder::setMemoryMode); + builder.setMeterProvider(meterProviderRef::get); return builder.build(); } else if (protocol.equals(PROTOCOL_GRPC)) { @@ -56,7 +67,9 @@ public SpanExporter createExporter(ConfigProperties config) { builder::setTimeout, builder::setTrustedCertificates, builder::setClientTls, - builder::setRetryPolicy); + builder::setRetryPolicy, + builder::setMemoryMode); + builder.setMeterProvider(meterProviderRef::get); return builder.build(); } @@ -77,4 +90,9 @@ OtlpHttpSpanExporterBuilder httpBuilder() { OtlpGrpcSpanExporterBuilder grpcBuilder() { return OtlpGrpcSpanExporter.builder(); } + + @Override + public void afterAutoConfigure(OpenTelemetrySdk sdk) { + meterProviderRef.set(sdk.getMeterProvider()); + } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/MarshalerLogsServiceGrpc.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/MarshalerLogsServiceGrpc.java index 06d7da50bff..451e5abae32 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/MarshalerLogsServiceGrpc.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/MarshalerLogsServiceGrpc.java @@ -14,7 +14,7 @@ import io.grpc.stub.ClientCalls; import io.opentelemetry.exporter.internal.grpc.MarshalerInputStream; import io.opentelemetry.exporter.internal.grpc.MarshalerServiceStub; -import io.opentelemetry.exporter.internal.otlp.logs.LogsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import java.io.InputStream; import javax.annotation.Nullable; @@ -23,15 +23,15 @@ final class MarshalerLogsServiceGrpc { private static final String SERVICE_NAME = "opentelemetry.proto.collector.logs.v1.LogsService"; - private static final MethodDescriptor.Marshaller REQUEST_MARSHALLER = - new MethodDescriptor.Marshaller() { + private static final MethodDescriptor.Marshaller REQUEST_MARSHALLER = + new MethodDescriptor.Marshaller() { @Override - public InputStream stream(LogsRequestMarshaler value) { + public InputStream stream(Marshaler value) { return new MarshalerInputStream(value); } @Override - public LogsRequestMarshaler parse(InputStream stream) { + public Marshaler parse(InputStream stream) { throw new UnsupportedOperationException("Only for serializing"); } }; @@ -49,14 +49,13 @@ public ExportLogsServiceResponse parse(InputStream stream) { } }; - private static final MethodDescriptor - getExportMethod = - MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName(generateFullMethodName(SERVICE_NAME, "Export")) - .setRequestMarshaller(REQUEST_MARSHALLER) - .setResponseMarshaller(RESPONSE_MARSHALER) - .build(); + private static final MethodDescriptor getExportMethod = + MethodDescriptor.newBuilder() + 
.setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "Export")) + .setRequestMarshaller(REQUEST_MARSHALLER) + .setResponseMarshaller(RESPONSE_MARSHALER) + .build(); static LogsServiceFutureStub newFutureStub(Channel channel, @Nullable String authorityOverride) { return LogsServiceFutureStub.newStub( @@ -65,8 +64,7 @@ static LogsServiceFutureStub newFutureStub(Channel channel, @Nullable String aut } static final class LogsServiceFutureStub - extends MarshalerServiceStub< - LogsRequestMarshaler, ExportLogsServiceResponse, LogsServiceFutureStub> { + extends MarshalerServiceStub { private LogsServiceFutureStub(Channel channel, CallOptions callOptions) { super(channel, callOptions); } @@ -78,7 +76,7 @@ protected MarshalerLogsServiceGrpc.LogsServiceFutureStub build( } @Override - public ListenableFuture export(LogsRequestMarshaler request) { + public ListenableFuture export(Marshaler request) { return ClientCalls.futureUnaryCall( getChannel().newCall(getExportMethod, getCallOptions()), request); } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporter.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporter.java index a97a9553d13..e85cb76b78f 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporter.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporter.java @@ -7,11 +7,14 @@ import io.opentelemetry.exporter.internal.grpc.GrpcExporter; import io.opentelemetry.exporter.internal.grpc.GrpcExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.logs.LogsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.otlp.logs.LogReusableDataMarshaler; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.logs.data.LogRecordData; import io.opentelemetry.sdk.logs.export.LogRecordExporter; import java.util.Collection; +import java.util.StringJoiner; import javax.annotation.concurrent.ThreadSafe; /** @@ -22,8 +25,9 @@ @ThreadSafe public final class OtlpGrpcLogRecordExporter implements LogRecordExporter { - private final GrpcExporterBuilder builder; - private final GrpcExporter delegate; + private final GrpcExporterBuilder builder; + private final GrpcExporter delegate; + private final LogReusableDataMarshaler marshaler; /** * Returns a new {@link OtlpGrpcLogRecordExporter} using the default values. 
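// Editor's note: a minimal sketch (not the library's internal code) of the listener pattern this
// change applies to OtlpSpanExporterProvider above: the provider starts with MeterProvider.noop()
// and swaps in the autoconfigured SDK's MeterProvider once afterAutoConfigure is called. The class
// name is illustrative, and the AutoConfigureListener package is an assumption based on the
// imports added elsewhere in this change.
import io.opentelemetry.api.metrics.MeterProvider;
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener;
import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider;
import io.opentelemetry.sdk.trace.export.SpanExporter;
import java.util.concurrent.atomic.AtomicReference;

public final class MyOtlpSpanExporterProvider
    implements ConfigurableSpanExporterProvider, AutoConfigureListener {

  // Starts as a no-op so building the exporter before autoconfiguration finishes is safe.
  private final AtomicReference<MeterProvider> meterProviderRef =
      new AtomicReference<>(MeterProvider.noop());

  @Override
  public SpanExporter createExporter(ConfigProperties config) {
    // Passing a supplier defers resolution, so the exporter picks up the SDK meter provider later.
    return OtlpGrpcSpanExporter.builder().setMeterProvider(meterProviderRef::get).build();
  }

  @Override
  public String getName() {
    return "my-otlp";
  }

  @Override
  public void afterAutoConfigure(OpenTelemetrySdk sdk) {
    meterProviderRef.set(sdk.getMeterProvider());
  }
}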
@@ -47,10 +51,12 @@ public static OtlpGrpcLogRecordExporterBuilder builder() { } OtlpGrpcLogRecordExporter( - GrpcExporterBuilder builder, - GrpcExporter delegate) { + GrpcExporterBuilder builder, + GrpcExporter delegate, + MemoryMode memoryMode) { this.builder = builder; this.delegate = delegate; + this.marshaler = new LogReusableDataMarshaler(memoryMode, delegate::export); } /** @@ -61,7 +67,7 @@ public static OtlpGrpcLogRecordExporterBuilder builder() { * @since 1.29.0 */ public OtlpGrpcLogRecordExporterBuilder toBuilder() { - return new OtlpGrpcLogRecordExporterBuilder(builder.copy()); + return new OtlpGrpcLogRecordExporterBuilder(builder.copy(), marshaler.getMemoryMode()); } /** @@ -72,8 +78,7 @@ public OtlpGrpcLogRecordExporterBuilder toBuilder() { */ @Override public CompletableResultCode export(Collection logs) { - LogsRequestMarshaler request = LogsRequestMarshaler.create(logs); - return delegate.export(request, logs.size()); + return marshaler.export(logs); } @Override @@ -92,6 +97,9 @@ public CompletableResultCode shutdown() { @Override public String toString() { - return "OtlpGrpcLogRecordExporter{" + builder.toString(false) + "}"; + StringJoiner joiner = new StringJoiner(", ", "OtlpGrpcLogRecordExporter{", "}"); + joiner.add(builder.toString(false)); + joiner.add("memoryMode=" + marshaler.getMemoryMode()); + return joiner.toString(); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporterBuilder.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporterBuilder.java index 97b193bab3f..a26629ed7f4 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporterBuilder.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporterBuilder.java @@ -11,13 +11,21 @@ import io.grpc.ManagedChannel; import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.exporter.internal.compression.Compressor; +import io.opentelemetry.exporter.internal.compression.CompressorProvider; +import io.opentelemetry.exporter.internal.compression.CompressorUtil; import io.opentelemetry.exporter.internal.grpc.GrpcExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.logs.LogsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.exporter.otlp.internal.OtlpUserAgent; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.net.URI; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -36,13 +44,16 @@ public final class OtlpGrpcLogRecordExporterBuilder { private static final String DEFAULT_ENDPOINT_URL = "http://localhost:4317"; private static final URI DEFAULT_ENDPOINT = URI.create(DEFAULT_ENDPOINT_URL); private static final long DEFAULT_TIMEOUT_SECS = 10; + private static final MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.REUSABLE_DATA; // Visible for testing - final GrpcExporterBuilder delegate; + final GrpcExporterBuilder delegate; + private MemoryMode memoryMode; - OtlpGrpcLogRecordExporterBuilder(GrpcExporterBuilder delegate) { + OtlpGrpcLogRecordExporterBuilder(GrpcExporterBuilder delegate, MemoryMode 
memoryMode) { this.delegate = delegate; - OtlpUserAgent.addUserAgentHeader(delegate::addHeader); + this.memoryMode = memoryMode; + OtlpUserAgent.addUserAgentHeader(delegate::addConstantHeader); } OtlpGrpcLogRecordExporterBuilder() { @@ -53,7 +64,8 @@ public final class OtlpGrpcLogRecordExporterBuilder { DEFAULT_TIMEOUT_SECS, DEFAULT_ENDPOINT, () -> MarshalerLogsServiceGrpc::newFutureStub, - GRPC_ENDPOINT_PATH)); + GRPC_ENDPOINT_PATH), + DEFAULT_MEMORY_MODE); } /** @@ -96,6 +108,30 @@ public OtlpGrpcLogRecordExporterBuilder setTimeout(Duration timeout) { return this; } + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value GrpcExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.36.0 + */ + public OtlpGrpcLogRecordExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + requireNonNull(unit, "unit"); + checkArgument(timeout >= 0, "timeout must be non-negative"); + delegate.setConnectTimeout(timeout, unit); + return this; + } + + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value GrpcExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.36.0 + */ + public OtlpGrpcLogRecordExporterBuilder setConnectTimeout(Duration timeout) { + requireNonNull(timeout, "timeout"); + return setConnectTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); + } + /** * Sets the OTLP endpoint to connect to. If unset, defaults to {@value DEFAULT_ENDPOINT_URL}. The * endpoint must start with either http:// or https://. @@ -107,15 +143,14 @@ public OtlpGrpcLogRecordExporterBuilder setEndpoint(String endpoint) { } /** - * Sets the method used to compress payloads. If unset, compression is disabled. Currently - * supported compression methods include "gzip" and "none". + * Sets the method used to compress payloads. If unset, compression is disabled. Compression + * method "gzip" and "none" are supported out of the box. Support for additional compression + * methods is available by implementing {@link Compressor} and {@link CompressorProvider}. */ public OtlpGrpcLogRecordExporterBuilder setCompression(String compressionMethod) { requireNonNull(compressionMethod, "compressionMethod"); - checkArgument( - compressionMethod.equals("gzip") || compressionMethod.equals("none"), - "Unsupported compression method. Supported compression methods include: gzip, none."); - delegate.setCompression(compressionMethod); + Compressor compressor = CompressorUtil.validateAndResolveCompressor(compressionMethod); + delegate.setCompression(compressor); return this; } @@ -150,25 +185,39 @@ public OtlpGrpcLogRecordExporterBuilder setSslContext( } /** - * Add header to request. Optional. Applicable only if {@link - * OtlpGrpcLogRecordExporterBuilder#setChannel(ManagedChannel)} is not used to set channel. + * Add a constant header to requests. If the {@code key} collides with another constant header + * name or a one from {@link #setHeaders(Supplier)}, the values from both are included. Applicable + * only if {@link OtlpGrpcLogRecordExporterBuilder#setChannel(ManagedChannel)} is not used to set + * channel. * * @param key header key * @param value header value * @return this builder's instance */ public OtlpGrpcLogRecordExporterBuilder addHeader(String key, String value) { - delegate.addHeader(key, value); + delegate.addConstantHeader(key, value); return this; } /** - * Ses the retry policy. Retry is disabled by default. + * Set the supplier of headers to add to requests. 
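// Editor's note: a hedged usage sketch of the header APIs described here for
// OtlpGrpcLogRecordExporterBuilder. The endpoint and header values are placeholders, and
// fetchAuthToken() is a hypothetical helper standing in for whatever produces a fresh credential.
import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter;
import java.util.Collections;

class LogExporterHeadersExample {
  static OtlpGrpcLogRecordExporter create() {
    return OtlpGrpcLogRecordExporter.builder()
        .setEndpoint("http://localhost:4317")
        // Constant header: registered once, sent on every request.
        .addHeader("x-tenant", "team-a")
        // Supplier-based headers: re-evaluated per request, e.g. for rotating tokens.
        // If a supplied key collides with a constant header, both values are sent.
        .setHeaders(() -> Collections.singletonMap("authorization", "Bearer " + fetchAuthToken()))
        .build();
  }

  // Hypothetical credential source, included only to keep the sketch self-contained.
  static String fetchAuthToken() {
    return "example-token";
  }
}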
If a key from the map collides with a constant + * from {@link #addHeader(String, String)}, the values from both are included. Applicable only if + * {@link OtlpGrpcLogRecordExporterBuilder#setChannel(ManagedChannel)} is not used to set channel. + * + * @since 1.33.0 + */ + public OtlpGrpcLogRecordExporterBuilder setHeaders(Supplier> headerSupplier) { + delegate.setHeadersSupplier(headerSupplier); + return this; + } + + /** + * Set the retry policy, or {@code null} to disable retry. Retry policy is {@link + * RetryPolicy#getDefault()} by default * * @since 1.28.0 */ - public OtlpGrpcLogRecordExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { - requireNonNull(retryPolicy, "retryPolicy"); + public OtlpGrpcLogRecordExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { delegate.setRetryPolicy(retryPolicy); return this; } @@ -179,7 +228,60 @@ public OtlpGrpcLogRecordExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) */ public OtlpGrpcLogRecordExporterBuilder setMeterProvider(MeterProvider meterProvider) { requireNonNull(meterProvider, "meterProvider"); - delegate.setMeterProvider(meterProvider); + setMeterProvider(() -> meterProvider); + return this; + } + + /** + * Sets the {@link MeterProvider} supplier used to collect metrics related to export. If not set, + * uses {@link GlobalOpenTelemetry#getMeterProvider()}. + * + * @since 1.32.0 + */ + public OtlpGrpcLogRecordExporterBuilder setMeterProvider( + Supplier meterProviderSupplier) { + requireNonNull(meterProviderSupplier, "meterProviderSupplier"); + delegate.setMeterProvider(meterProviderSupplier); + return this; + } + + /** + * Set the {@link MemoryMode}. If unset, defaults to {@link #DEFAULT_MEMORY_MODE}. + * + *
<p>
When memory mode is {@link MemoryMode#REUSABLE_DATA}, serialization is optimized to reduce + * memory allocation. + * + * @since 1.39.0 + */ + public OtlpGrpcLogRecordExporterBuilder setMemoryMode(MemoryMode memoryMode) { + requireNonNull(memoryMode, "memoryMode"); + this.memoryMode = memoryMode; + return this; + } + + /** + * Set the {@link ClassLoader} used to load the sender API. + * + * @since 1.48.0 + */ + public OtlpGrpcLogRecordExporterBuilder setServiceClassLoader(ClassLoader serviceClassLoader) { + requireNonNull(serviceClassLoader, "serviceClassLoader"); + delegate.setServiceClassLoader(serviceClassLoader); + return this; + } + + /** + * Set the {@link ExecutorService} used to execute requests. + * + *
<p>
NOTE: By calling this method, you are opting into managing the lifecycle of the {@code + * executorService}. {@link ExecutorService#shutdown()} will NOT be called when this exporter is + * shutdown. + * + * @since 1.49.0 + */ + public OtlpGrpcLogRecordExporterBuilder setExecutorService(ExecutorService executorService) { + requireNonNull(executorService, "executorService"); + delegate.setExecutorService(executorService); return this; } @@ -189,6 +291,6 @@ public OtlpGrpcLogRecordExporterBuilder setMeterProvider(MeterProvider meterProv * @return a new exporter's instance */ public OtlpGrpcLogRecordExporter build() { - return new OtlpGrpcLogRecordExporter(delegate, delegate.build()); + return new OtlpGrpcLogRecordExporter(delegate, delegate.build(), memoryMode); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/MarshalerMetricsServiceGrpc.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/MarshalerMetricsServiceGrpc.java index 240ad81beb2..af70c6bd175 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/MarshalerMetricsServiceGrpc.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/MarshalerMetricsServiceGrpc.java @@ -14,7 +14,7 @@ import io.grpc.stub.ClientCalls; import io.opentelemetry.exporter.internal.grpc.MarshalerInputStream; import io.opentelemetry.exporter.internal.grpc.MarshalerServiceStub; -import io.opentelemetry.exporter.internal.otlp.metrics.MetricsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import java.io.InputStream; import javax.annotation.Nullable; @@ -24,15 +24,15 @@ final class MarshalerMetricsServiceGrpc { private static final String SERVICE_NAME = "opentelemetry.proto.collector.metrics.v1.MetricsService"; - private static final MethodDescriptor.Marshaller REQUEST_MARSHALLER = - new MethodDescriptor.Marshaller() { + private static final MethodDescriptor.Marshaller REQUEST_MARSHALLER = + new MethodDescriptor.Marshaller() { @Override - public InputStream stream(MetricsRequestMarshaler value) { + public InputStream stream(Marshaler value) { return new MarshalerInputStream(value); } @Override - public MetricsRequestMarshaler parse(InputStream stream) { + public Marshaler parse(InputStream stream) { throw new UnsupportedOperationException("Only for serializing"); } }; @@ -51,14 +51,13 @@ public ExportMetricsServiceResponse parse(InputStream stream) { } }; - private static final MethodDescriptor - getExportMethod = - MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName(generateFullMethodName(SERVICE_NAME, "Export")) - .setRequestMarshaller(REQUEST_MARSHALLER) - .setResponseMarshaller(RESPONSE_MARSHALER) - .build(); + private static final MethodDescriptor getExportMethod = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "Export")) + .setRequestMarshaller(REQUEST_MARSHALLER) + .setResponseMarshaller(RESPONSE_MARSHALER) + .build(); static MetricsServiceFutureStub newFutureStub( Channel channel, @Nullable String authorityOverride) { @@ -69,7 +68,7 @@ static MetricsServiceFutureStub newFutureStub( static final class MetricsServiceFutureStub extends MarshalerServiceStub< - MetricsRequestMarshaler, ExportMetricsServiceResponse, MetricsServiceFutureStub> { + Marshaler, ExportMetricsServiceResponse, MetricsServiceFutureStub> { private MetricsServiceFutureStub(Channel channel, CallOptions 
callOptions) { super(channel, callOptions); } @@ -81,7 +80,7 @@ protected MarshalerMetricsServiceGrpc.MetricsServiceFutureStub build( } @Override - public ListenableFuture export(MetricsRequestMarshaler request) { + public ListenableFuture export(Marshaler request) { return ClientCalls.futureUnaryCall( getChannel().newCall(getExportMethod, getCallOptions()), request); } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporter.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporter.java index e006675f28c..5dd48db4908 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporter.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporter.java @@ -7,8 +7,10 @@ import io.opentelemetry.exporter.internal.grpc.GrpcExporter; import io.opentelemetry.exporter.internal.grpc.GrpcExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.metrics.MetricsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.otlp.metrics.MetricReusableDataMarshaler; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; @@ -17,6 +19,7 @@ import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.util.Collection; +import java.util.StringJoiner; import javax.annotation.concurrent.ThreadSafe; /** @@ -27,10 +30,11 @@ @ThreadSafe public final class OtlpGrpcMetricExporter implements MetricExporter { - private final GrpcExporterBuilder builder; - private final GrpcExporter delegate; + private final GrpcExporterBuilder builder; + private final GrpcExporter delegate; private final AggregationTemporalitySelector aggregationTemporalitySelector; private final DefaultAggregationSelector defaultAggregationSelector; + private final MetricReusableDataMarshaler marshaler; /** * Returns a new {@link OtlpGrpcMetricExporter} using the default values. 
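// Editor's note: a small sketch of the memory-mode surface added to the metric exporter. Per this
// change the builder defaults to MemoryMode.REUSABLE_DATA, the chosen mode is reported through
// MetricExporter#getMemoryMode(), and (per the updated provider tests later in this diff)
// autoconfigure users can select it with otel.java.exporter.memory_mode=immutable_data.
// The endpoint value is a placeholder.
import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter;
import io.opentelemetry.sdk.common.export.MemoryMode;

class MetricMemoryModeExample {
  static OtlpGrpcMetricExporter immutableDataExporter() {
    OtlpGrpcMetricExporter exporter =
        OtlpGrpcMetricExporter.builder()
            .setEndpoint("http://localhost:4317")
            // Opt out of the REUSABLE_DATA default, e.g. when a wrapping exporter keeps
            // references to MetricData batches after export() returns.
            .setMemoryMode(MemoryMode.IMMUTABLE_DATA)
            .build();
    // The metrics SDK reads this to decide whether it may reuse data structures between exports.
    MemoryMode mode = exporter.getMemoryMode(); // IMMUTABLE_DATA here
    return exporter;
  }
}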
@@ -54,14 +58,16 @@ public static OtlpGrpcMetricExporterBuilder builder() { } OtlpGrpcMetricExporter( - GrpcExporterBuilder builder, - GrpcExporter delegate, + GrpcExporterBuilder builder, + GrpcExporter delegate, AggregationTemporalitySelector aggregationTemporalitySelector, - DefaultAggregationSelector defaultAggregationSelector) { + DefaultAggregationSelector defaultAggregationSelector, + MemoryMode memoryMode) { this.builder = builder; this.delegate = delegate; this.aggregationTemporalitySelector = aggregationTemporalitySelector; this.defaultAggregationSelector = defaultAggregationSelector; + this.marshaler = new MetricReusableDataMarshaler(memoryMode, delegate::export); } /** @@ -72,7 +78,7 @@ public static OtlpGrpcMetricExporterBuilder builder() { * @since 1.29.0 */ public OtlpGrpcMetricExporterBuilder toBuilder() { - return new OtlpGrpcMetricExporterBuilder(builder.copy()); + return new OtlpGrpcMetricExporterBuilder(builder.copy(), marshaler.getMemoryMode()); } @Override @@ -85,6 +91,11 @@ public Aggregation getDefaultAggregation(InstrumentType instrumentType) { return defaultAggregationSelector.getDefaultAggregation(instrumentType); } + @Override + public MemoryMode getMemoryMode() { + return marshaler.getMemoryMode(); + } + /** * Submits all the given metrics in a single batch to the OpenTelemetry collector. * @@ -93,9 +104,7 @@ public Aggregation getDefaultAggregation(InstrumentType instrumentType) { */ @Override public CompletableResultCode export(Collection metrics) { - MetricsRequestMarshaler request = MetricsRequestMarshaler.create(metrics); - - return delegate.export(request, metrics.size()); + return marshaler.export(metrics); } /** @@ -119,6 +128,15 @@ public CompletableResultCode shutdown() { @Override public String toString() { - return "OtlpGrpcMetricExporter{" + builder.toString(false) + "}"; + StringJoiner joiner = new StringJoiner(", ", "OtlpGrpcMetricExporter{", "}"); + joiner.add(builder.toString(false)); + joiner.add( + "aggregationTemporalitySelector=" + + AggregationTemporalitySelector.asString(aggregationTemporalitySelector)); + joiner.add( + "defaultAggregationSelector=" + + DefaultAggregationSelector.asString(defaultAggregationSelector)); + joiner.add("memoryMode=" + marshaler.getMemoryMode()); + return joiner.toString(); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterBuilder.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterBuilder.java index 5059d111987..d6062d26621 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterBuilder.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterBuilder.java @@ -10,17 +10,27 @@ import io.grpc.ManagedChannel; import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.exporter.internal.compression.Compressor; +import io.opentelemetry.exporter.internal.compression.CompressorProvider; +import io.opentelemetry.exporter.internal.compression.CompressorUtil; import io.opentelemetry.exporter.internal.grpc.GrpcExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.metrics.MetricsRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.exporter.otlp.internal.OtlpUserAgent; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.common.export.RetryPolicy; import io.opentelemetry.sdk.metrics.InstrumentType; +import 
io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.net.URI; import java.time.Duration; +import java.util.Collection; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -41,20 +51,23 @@ public final class OtlpGrpcMetricExporterBuilder { private static final long DEFAULT_TIMEOUT_SECS = 10; private static final AggregationTemporalitySelector DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR = AggregationTemporalitySelector.alwaysCumulative(); + private static final MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.REUSABLE_DATA; // Visible for testing - final GrpcExporterBuilder delegate; + final GrpcExporterBuilder delegate; private AggregationTemporalitySelector aggregationTemporalitySelector = DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR; private DefaultAggregationSelector defaultAggregationSelector = DefaultAggregationSelector.getDefault(); + private MemoryMode memoryMode; - OtlpGrpcMetricExporterBuilder(GrpcExporterBuilder delegate) { + OtlpGrpcMetricExporterBuilder(GrpcExporterBuilder delegate, MemoryMode memoryMode) { this.delegate = delegate; - delegate.setMeterProvider(MeterProvider.noop()); - OtlpUserAgent.addUserAgentHeader(delegate::addHeader); + this.memoryMode = memoryMode; + delegate.setMeterProvider(MeterProvider::noop); + OtlpUserAgent.addUserAgentHeader(delegate::addConstantHeader); } OtlpGrpcMetricExporterBuilder() { @@ -65,7 +78,8 @@ public final class OtlpGrpcMetricExporterBuilder { DEFAULT_TIMEOUT_SECS, DEFAULT_ENDPOINT, () -> MarshalerMetricsServiceGrpc::newFutureStub, - GRPC_ENDPOINT_PATH)); + GRPC_ENDPOINT_PATH), + DEFAULT_MEMORY_MODE); } /** @@ -108,6 +122,30 @@ public OtlpGrpcMetricExporterBuilder setTimeout(Duration timeout) { return this; } + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value GrpcExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.36.0 + */ + public OtlpGrpcMetricExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + requireNonNull(unit, "unit"); + checkArgument(timeout >= 0, "timeout must be non-negative"); + delegate.setConnectTimeout(timeout, unit); + return this; + } + + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value GrpcExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.36.0 + */ + public OtlpGrpcMetricExporterBuilder setConnectTimeout(Duration timeout) { + requireNonNull(timeout, "timeout"); + return setConnectTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); + } + /** * Sets the OTLP endpoint to connect to. If unset, defaults to {@value DEFAULT_ENDPOINT_URL}. The * endpoint must start with either http:// or https://. @@ -119,15 +157,14 @@ public OtlpGrpcMetricExporterBuilder setEndpoint(String endpoint) { } /** - * Sets the method used to compress payloads. If unset, compression is disabled. Currently - * supported compression methods include "gzip" and "none". + * Sets the method used to compress payloads. If unset, compression is disabled. Compression + * method "gzip" and "none" are supported out of the box. 
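// Editor's note: illustrative builder calls for the metric exporter options discussed in this
// file; values are placeholders. Note the behavior change: retry now defaults to
// RetryPolicy.getDefault() and is disabled by passing null, rather than by omitting the call.
import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter;
import java.time.Duration;

class MetricExporterOptionsExample {
  static OtlpGrpcMetricExporter create() {
    return OtlpGrpcMetricExporter.builder()
        .setEndpoint("http://localhost:4317")
        // New in 1.36.0: bound how long connection establishment may take.
        .setConnectTimeout(Duration.ofSeconds(5))
        // "gzip" and "none" work out of the box; other codecs come from the Compressor SPI.
        .setCompression("gzip")
        // Explicitly disable the (now default) retry policy.
        .setRetryPolicy(null)
        .build();
  }
}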
Support for additional compression + * methods is available by implementing {@link Compressor} and {@link CompressorProvider}. */ public OtlpGrpcMetricExporterBuilder setCompression(String compressionMethod) { requireNonNull(compressionMethod, "compressionMethod"); - checkArgument( - compressionMethod.equals("gzip") || compressionMethod.equals("none"), - "Unsupported compression method. Supported compression methods include: gzip, none."); - delegate.setCompression(compressionMethod); + Compressor compressor = CompressorUtil.validateAndResolveCompressor(compressionMethod); + delegate.setCompression(compressor); return this; } @@ -163,15 +200,29 @@ public OtlpGrpcMetricExporterBuilder setSslContext( } /** - * Add header to request. Optional. Applicable only if {@link - * OtlpGrpcMetricExporterBuilder#setChannel(ManagedChannel)} is not used to set channel. + * Add a constant header to requests. If the {@code key} collides with another constant header + * name or a one from {@link #setHeaders(Supplier)}, the values from both are included. Applicable + * only if {@link OtlpGrpcMetricExporterBuilder#setChannel(ManagedChannel)} is not used to set + * channel. * * @param key header key * @param value header value * @return this builder's instance */ public OtlpGrpcMetricExporterBuilder addHeader(String key, String value) { - delegate.addHeader(key, value); + delegate.addConstantHeader(key, value); + return this; + } + + /** + * Set the supplier of headers to add to requests. If a key from the map collides with a constant + * from {@link #addHeader(String, String)}, the values from both are included. Applicable only if + * {@link OtlpGrpcMetricExporterBuilder#setChannel(ManagedChannel)} is not used to set channel. + * + * @since 1.33.0 + */ + public OtlpGrpcMetricExporterBuilder setHeaders(Supplier> headerSupplier) { + delegate.setHeadersSupplier(headerSupplier); return this; } @@ -207,16 +258,61 @@ public OtlpGrpcMetricExporterBuilder setDefaultAggregationSelector( } /** - * Ses the retry policy. Retry is disabled by default. + * Set the retry policy, or {@code null} to disable retry. Retry policy is {@link + * RetryPolicy#getDefault()} by default * * @since 1.28.0 */ - public OtlpGrpcMetricExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { - requireNonNull(retryPolicy, "retryPolicy"); + public OtlpGrpcMetricExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { delegate.setRetryPolicy(retryPolicy); return this; } + /** + * Set the {@link MemoryMode}. If unset, defaults to {@link #DEFAULT_MEMORY_MODE}. + * + *
<p>
When memory mode is {@link MemoryMode#REUSABLE_DATA}, serialization is optimized to reduce + * memory allocation. Additionally, the value is used for {@link MetricExporter#getMemoryMode()}, + * which sends a signal to the metrics SDK to reuse memory when possible. This is safe and + * desirable for most use cases, but should be used with caution of wrapping and delegating to the + * exporter. It is not safe for the wrapping exporter to hold onto references to {@link + * MetricData} batches since the same data structures will be reused in subsequent calls to {@link + * MetricExporter#export(Collection)}. + * + * @since 1.39.0 + */ + public OtlpGrpcMetricExporterBuilder setMemoryMode(MemoryMode memoryMode) { + requireNonNull(memoryMode, "memoryMode"); + this.memoryMode = memoryMode; + return this; + } + + /** + * Set the {@link ClassLoader} used to load the sender API. + * + * @since 1.48.0 + */ + public OtlpGrpcMetricExporterBuilder setServiceClassLoader(ClassLoader serviceClassLoader) { + requireNonNull(serviceClassLoader, "serviceClassLoader"); + delegate.setServiceClassLoader(serviceClassLoader); + return this; + } + + /** + * Set the {@link ExecutorService} used to execute requests. + * + *
<p>
NOTE: By calling this method, you are opting into managing the lifecycle of the {@code + * executorService}. {@link ExecutorService#shutdown()} will NOT be called when this exporter is + * shutdown. + * + * @since 1.49.0 + */ + public OtlpGrpcMetricExporterBuilder setExecutorService(ExecutorService executorService) { + requireNonNull(executorService, "executorService"); + delegate.setExecutorService(executorService); + return this; + } + /** * Constructs a new instance of the exporter based on the builder's values. * @@ -224,6 +320,10 @@ public OtlpGrpcMetricExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { */ public OtlpGrpcMetricExporter build() { return new OtlpGrpcMetricExporter( - delegate, delegate.build(), aggregationTemporalitySelector, defaultAggregationSelector); + delegate, + delegate.build(), + aggregationTemporalitySelector, + defaultAggregationSelector, + memoryMode); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/MarshalerTraceServiceGrpc.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/MarshalerTraceServiceGrpc.java index 973bc37cb39..784eae98a49 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/MarshalerTraceServiceGrpc.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/MarshalerTraceServiceGrpc.java @@ -10,7 +10,7 @@ import io.grpc.MethodDescriptor; import io.opentelemetry.exporter.internal.grpc.MarshalerInputStream; import io.opentelemetry.exporter.internal.grpc.MarshalerServiceStub; -import io.opentelemetry.exporter.internal.otlp.traces.TraceRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import java.io.InputStream; import javax.annotation.Nullable; @@ -19,15 +19,15 @@ final class MarshalerTraceServiceGrpc { private static final String SERVICE_NAME = "opentelemetry.proto.collector.trace.v1.TraceService"; - private static final MethodDescriptor.Marshaller REQUEST_MARSHALLER = - new MethodDescriptor.Marshaller() { + private static final MethodDescriptor.Marshaller REQUEST_MARSHALLER = + new MethodDescriptor.Marshaller() { @Override - public InputStream stream(TraceRequestMarshaler value) { + public InputStream stream(Marshaler value) { return new MarshalerInputStream(value); } @Override - public TraceRequestMarshaler parse(InputStream stream) { + public Marshaler parse(InputStream stream) { throw new UnsupportedOperationException("Only for serializing"); } }; @@ -45,9 +45,9 @@ public ExportTraceServiceResponse parse(InputStream stream) { } }; - private static final io.grpc.MethodDescriptor + private static final io.grpc.MethodDescriptor getExportMethod = - io.grpc.MethodDescriptor.newBuilder() + io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "Export")) .setRequestMarshaller(REQUEST_MARSHALLER) @@ -62,8 +62,7 @@ static TraceServiceFutureStub newFutureStub( } static final class TraceServiceFutureStub - extends MarshalerServiceStub< - TraceRequestMarshaler, ExportTraceServiceResponse, TraceServiceFutureStub> { + extends MarshalerServiceStub { private TraceServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @@ -76,7 +75,7 @@ protected MarshalerTraceServiceGrpc.TraceServiceFutureStub build( @Override public com.google.common.util.concurrent.ListenableFuture export( - TraceRequestMarshaler request) { + Marshaler request) { return 
io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getExportMethod, getCallOptions()), request); } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporter.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporter.java index a249c25e76f..6d0d3d2fffa 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporter.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporter.java @@ -7,19 +7,23 @@ import io.opentelemetry.exporter.internal.grpc.GrpcExporter; import io.opentelemetry.exporter.internal.grpc.GrpcExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.traces.TraceRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.otlp.traces.SpanReusableDataMarshaler; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.export.SpanExporter; import java.util.Collection; +import java.util.StringJoiner; import javax.annotation.concurrent.ThreadSafe; /** Exports spans using OTLP via gRPC, using OpenTelemetry's protobuf model. */ @ThreadSafe public final class OtlpGrpcSpanExporter implements SpanExporter { - private final GrpcExporterBuilder builder; - private final GrpcExporter delegate; + private final GrpcExporterBuilder builder; + private final GrpcExporter delegate; + private final SpanReusableDataMarshaler marshaler; /** * Returns a new {@link OtlpGrpcSpanExporter} using the default values. @@ -43,10 +47,12 @@ public static OtlpGrpcSpanExporterBuilder builder() { } OtlpGrpcSpanExporter( - GrpcExporterBuilder builder, - GrpcExporter delegate) { + GrpcExporterBuilder builder, + GrpcExporter delegate, + MemoryMode memoryMode) { this.builder = builder; this.delegate = delegate; + this.marshaler = new SpanReusableDataMarshaler(memoryMode, delegate::export); } /** @@ -57,7 +63,7 @@ public static OtlpGrpcSpanExporterBuilder builder() { * @since 1.29.0 */ public OtlpGrpcSpanExporterBuilder toBuilder() { - return new OtlpGrpcSpanExporterBuilder(builder.copy()); + return new OtlpGrpcSpanExporterBuilder(builder.copy(), marshaler.getMemoryMode()); } /** @@ -68,9 +74,7 @@ public OtlpGrpcSpanExporterBuilder toBuilder() { */ @Override public CompletableResultCode export(Collection spans) { - TraceRequestMarshaler request = TraceRequestMarshaler.create(spans); - - return delegate.export(request, spans.size()); + return marshaler.export(spans); } /** @@ -94,6 +98,9 @@ public CompletableResultCode shutdown() { @Override public String toString() { - return "OtlpGrpcSpanExporter{" + builder.toString(false) + "}"; + StringJoiner joiner = new StringJoiner(", ", "OtlpGrpcSpanExporter{", "}"); + joiner.add(builder.toString(false)); + joiner.add("memoryMode=" + marshaler.getMemoryMode()); + return joiner.toString(); } } diff --git a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporterBuilder.java b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporterBuilder.java index 9a27f97fc8b..d7452606d74 100644 --- a/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporterBuilder.java +++ b/exporters/otlp/all/src/main/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporterBuilder.java @@ -11,13 +11,21 @@ import 
io.grpc.ManagedChannel; import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.exporter.internal.compression.Compressor; +import io.opentelemetry.exporter.internal.compression.CompressorProvider; +import io.opentelemetry.exporter.internal.compression.CompressorUtil; import io.opentelemetry.exporter.internal.grpc.GrpcExporterBuilder; -import io.opentelemetry.exporter.internal.otlp.traces.TraceRequestMarshaler; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.exporter.otlp.internal.OtlpUserAgent; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.net.URI; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -32,13 +40,16 @@ public final class OtlpGrpcSpanExporterBuilder { private static final String DEFAULT_ENDPOINT_URL = "http://localhost:4317"; private static final URI DEFAULT_ENDPOINT = URI.create(DEFAULT_ENDPOINT_URL); private static final long DEFAULT_TIMEOUT_SECS = 10; + private static final MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.REUSABLE_DATA; // Visible for testing - final GrpcExporterBuilder delegate; + final GrpcExporterBuilder delegate; + private MemoryMode memoryMode; - OtlpGrpcSpanExporterBuilder(GrpcExporterBuilder delegate) { + OtlpGrpcSpanExporterBuilder(GrpcExporterBuilder delegate, MemoryMode memoryMode) { this.delegate = delegate; - OtlpUserAgent.addUserAgentHeader(delegate::addHeader); + this.memoryMode = memoryMode; + OtlpUserAgent.addUserAgentHeader(delegate::addConstantHeader); } OtlpGrpcSpanExporterBuilder() { @@ -49,7 +60,8 @@ public final class OtlpGrpcSpanExporterBuilder { DEFAULT_TIMEOUT_SECS, DEFAULT_ENDPOINT, () -> MarshalerTraceServiceGrpc::newFutureStub, - GRPC_ENDPOINT_PATH)); + GRPC_ENDPOINT_PATH), + DEFAULT_MEMORY_MODE); } /** @@ -92,6 +104,30 @@ public OtlpGrpcSpanExporterBuilder setTimeout(Duration timeout) { return this; } + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value GrpcExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.36.0 + */ + public OtlpGrpcSpanExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + requireNonNull(unit, "unit"); + checkArgument(timeout >= 0, "timeout must be non-negative"); + delegate.setConnectTimeout(timeout, unit); + return this; + } + + /** + * Sets the maximum time to wait for new connections to be established. If unset, defaults to + * {@value GrpcExporterBuilder#DEFAULT_CONNECT_TIMEOUT_SECS}s. + * + * @since 1.36.0 + */ + public OtlpGrpcSpanExporterBuilder setConnectTimeout(Duration timeout) { + requireNonNull(timeout, "timeout"); + return setConnectTimeout(timeout.toNanos(), TimeUnit.NANOSECONDS); + } + /** * Sets the OTLP endpoint to connect to. If unset, defaults to {@value DEFAULT_ENDPOINT_URL}. The * endpoint must start with either http:// or https://. @@ -103,15 +139,14 @@ public OtlpGrpcSpanExporterBuilder setEndpoint(String endpoint) { } /** - * Sets the method used to compress payloads. If unset, compression is disabled. Currently - * supported compression methods include "gzip" and "none". + * Sets the method used to compress payloads. If unset, compression is disabled. 
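// Editor's note: a sketch combining the span exporter builder additions in this file — connect
// timeout, memory mode, and a caller-managed ExecutorService (the exporter will NOT shut it down;
// see the setExecutorService note further below). Values are placeholders.
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.common.export.MemoryMode;
import java.time.Duration;
import java.util.concurrent.ExecutorService;

class SpanExporterOptionsExample {
  static OtlpGrpcSpanExporter create(ExecutorService callerOwnedExecutor) {
    return OtlpGrpcSpanExporter.builder()
        .setEndpoint("http://localhost:4317")
        .setConnectTimeout(Duration.ofSeconds(5))
        .setMemoryMode(MemoryMode.REUSABLE_DATA) // the default; shown here only for clarity
        .setExecutorService(callerOwnedExecutor) // caller remains responsible for shutdown()
        .build();
  }
}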
Compression + * method "gzip" and "none" are supported out of the box. Support for additional compression + * methods is available by implementing {@link Compressor} and {@link CompressorProvider}. */ public OtlpGrpcSpanExporterBuilder setCompression(String compressionMethod) { requireNonNull(compressionMethod, "compressionMethod"); - checkArgument( - compressionMethod.equals("gzip") || compressionMethod.equals("none"), - "Unsupported compression method. Supported compression methods include: gzip, none."); - delegate.setCompression(compressionMethod); + Compressor compressor = CompressorUtil.validateAndResolveCompressor(compressionMethod); + delegate.setCompression(compressor); return this; } @@ -147,25 +182,39 @@ public OtlpGrpcSpanExporterBuilder setSslContext( } /** - * Add header to request. Optional. Applicable only if {@link - * OtlpGrpcSpanExporterBuilder#setChannel(ManagedChannel)} is not called. + * Add a constant header to requests. If the {@code key} collides with another constant header + * name or a one from {@link #setHeaders(Supplier)}, the values from both are included. Applicable + * only if {@link OtlpGrpcSpanExporterBuilder#setChannel(ManagedChannel)} is not used to set + * channel. * * @param key header key * @param value header value * @return this builder's instance */ public OtlpGrpcSpanExporterBuilder addHeader(String key, String value) { - delegate.addHeader(key, value); + delegate.addConstantHeader(key, value); return this; } /** - * Ses the retry policy. Retry is disabled by default. + * Set the supplier of headers to add to requests. If a key from the map collides with a constant + * from {@link #addHeader(String, String)}, the values from both are included. Applicable only if + * {@link OtlpGrpcSpanExporterBuilder#setChannel(ManagedChannel)} is not used to set channel. + * + * @since 1.33.0 + */ + public OtlpGrpcSpanExporterBuilder setHeaders(Supplier> headerSupplier) { + delegate.setHeadersSupplier(headerSupplier); + return this; + } + + /** + * Set the retry policy, or {@code null} to disable retry. Retry policy is {@link + * RetryPolicy#getDefault()} by default * * @since 1.28.0 */ - public OtlpGrpcSpanExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { - requireNonNull(retryPolicy, "retryPolicy"); + public OtlpGrpcSpanExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { delegate.setRetryPolicy(retryPolicy); return this; } @@ -176,7 +225,60 @@ public OtlpGrpcSpanExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { */ public OtlpGrpcSpanExporterBuilder setMeterProvider(MeterProvider meterProvider) { requireNonNull(meterProvider, "meterProvider"); - delegate.setMeterProvider(meterProvider); + setMeterProvider(() -> meterProvider); + return this; + } + + /** + * Sets the {@link MeterProvider} supplier used to collect metrics related to export. If not set, + * uses {@link GlobalOpenTelemetry#getMeterProvider()}. + * + * @since 1.32.0 + */ + public OtlpGrpcSpanExporterBuilder setMeterProvider( + Supplier meterProviderSupplier) { + requireNonNull(meterProviderSupplier, "meterProviderSupplier"); + delegate.setMeterProvider(meterProviderSupplier); + return this; + } + + /** + * Set the {@link MemoryMode}. If unset, defaults to {@link #DEFAULT_MEMORY_MODE}. + * + *
<p>
When memory mode is {@link MemoryMode#REUSABLE_DATA}, serialization is optimized to reduce + * memory allocation. + * + * @since 1.39.0 + */ + public OtlpGrpcSpanExporterBuilder setMemoryMode(MemoryMode memoryMode) { + requireNonNull(memoryMode, "memoryMode"); + this.memoryMode = memoryMode; + return this; + } + + /** + * Set the {@link ClassLoader} used to load the sender API. + * + * @since 1.48.0 + */ + public OtlpGrpcSpanExporterBuilder setServiceClassLoader(ClassLoader serviceClassLoader) { + requireNonNull(serviceClassLoader, "serviceClassLoader"); + delegate.setServiceClassLoader(serviceClassLoader); + return this; + } + + /** + * Set the {@link ExecutorService} used to execute requests. + * + *
<p>
NOTE: By calling this method, you are opting into managing the lifecycle of the {@code + * executorService}. {@link ExecutorService#shutdown()} will NOT be called when this exporter is + * shutdown. + * + * @since 1.49.0 + */ + public OtlpGrpcSpanExporterBuilder setExecutorService(ExecutorService executorService) { + requireNonNull(executorService, "executorService"); + delegate.setExecutorService(executorService); return this; } @@ -186,6 +288,6 @@ public OtlpGrpcSpanExporterBuilder setMeterProvider(MeterProvider meterProvider) * @return a new exporter's instance */ public OtlpGrpcSpanExporter build() { - return new OtlpGrpcSpanExporter(delegate, delegate.build()); + return new OtlpGrpcSpanExporter(delegate, delegate.build(), memoryMode); } } diff --git a/exporters/otlp/all/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/exporters/otlp/all/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider new file mode 100644 index 00000000000..239060d1f1e --- /dev/null +++ b/exporters/otlp/all/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider @@ -0,0 +1,3 @@ +io.opentelemetry.exporter.otlp.internal.OtlpMetricExporterComponentProvider +io.opentelemetry.exporter.otlp.internal.OtlpSpanExporterComponentProvider +io.opentelemetry.exporter.otlp.internal.OtlpLogRecordExporterComponentProvider diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpConfigUtilTest.java b/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpConfigUtilTest.java index b3b896018ba..5716d604ec0 100644 --- a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpConfigUtilTest.java +++ b/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpConfigUtilTest.java @@ -11,10 +11,15 @@ import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_GRPC; import static io.opentelemetry.exporter.otlp.internal.OtlpConfigUtil.PROTOCOL_HTTP_PROTOBUF; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static java.util.stream.Collectors.toList; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.google.common.collect.ImmutableMap; +import io.github.netmikey.logunit.api.LogCapturer; +import io.opentelemetry.exporter.internal.ExporterBuilderUtil; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; import io.opentelemetry.sdk.metrics.Aggregation; @@ -23,13 +28,24 @@ import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Stream; +import javax.annotation.Nullable; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import 
org.slf4j.event.LoggingEvent; class OtlpConfigUtilTest { + @RegisterExtension + final LogCapturer logs = LogCapturer.create().captureForType(OtlpConfigUtil.class); + private static final String GENERIC_ENDPOINT_KEY = "otel.exporter.otlp.endpoint"; private static final String TRACES_ENDPOINT_KEY = "otel.exporter.otlp.traces.endpoint"; private static final String METRICS_ENDPOINT_KEY = "otel.exporter.otlp.metrics.endpoint"; @@ -120,6 +136,42 @@ void configureOtlpExporterBuilder_InvalidEndpoints() { .hasMessageContaining("OTLP endpoint must not have a path:"); } + @SuppressLogger(OtlpConfigUtil.class) + @ParameterizedTest + @MethodSource("misalignedOtlpPortArgs") + void configureOtlpExporterBuilder_MisalignedOtlpPort( + String protocol, String endpoint, @Nullable String expectedLog) { + configureEndpoint( + DATA_TYPE_TRACES, + ImmutableMap.of(GENERIC_ENDPOINT_KEY, endpoint, "otel.exporter.otlp.protocol", protocol)); + + List logMessages = + logs.getEvents().stream().map(LoggingEvent::getMessage).collect(toList()); + if (expectedLog == null) { + assertThat(logMessages).isEmpty(); + } else { + assertThat(logMessages).contains(expectedLog); + } + } + + private static Stream misalignedOtlpPortArgs() { + return Stream.of( + Arguments.of("http/protobuf", "http://localhost:4318/path", null), + Arguments.of("http/protobuf", "http://localhost:8080/path", null), + Arguments.of("http/protobuf", "http://localhost/path", null), + Arguments.of( + "http/protobuf", + "http://localhost:4317/path", + "OTLP exporter endpoint port is likely incorrect for protocol version \"http/protobuf\". The endpoint http://localhost:4317/path has port 4317. Typically, the \"http/protobuf\" version of OTLP uses port 4318."), + Arguments.of("grpc", "http://localhost:4317/", null), + Arguments.of("grpc", "http://localhost:8080/", null), + Arguments.of("grpc", "http://localhost/", null), + Arguments.of( + "grpc", + "http://localhost:4318/", + "OTLP exporter endpoint port is likely incorrect for protocol version \"grpc\". The endpoint http://localhost:4318/ has port 4318. 
Typically, the \"grpc\" version of OTLP uses port 4317.")); + } + private static ThrowingCallable configureEndpointCallable(Map properties) { return () -> configureEndpoint(DATA_TYPE_TRACES, properties); } @@ -325,6 +377,7 @@ private static String configureEndpoint(String dataType, Map pro value -> {}, value -> {}, (value1, value2) -> {}, + value -> {}, value -> {}); return endpoint.get(); @@ -361,8 +414,8 @@ void configureOtlpAggregationTemporality() { private static AggregationTemporality configureAggregationTemporality( Map properties) { AtomicReference temporalityRef = new AtomicReference<>(); - OtlpConfigUtil.configureOtlpAggregationTemporality( - DefaultConfigProperties.createFromMap(properties), temporalityRef::set); + ConfigProperties config = DefaultConfigProperties.createFromMap(properties); + ExporterBuilderUtil.configureOtlpAggregationTemporality(config, temporalityRef::set); // We apply the temporality selector to a HISTOGRAM instrument to simplify assertions return temporalityRef.get().getAggregationTemporality(InstrumentType.HISTOGRAM); } @@ -408,8 +461,8 @@ void configureOtlpHistogramDefaultAggregation() { private static DefaultAggregationSelector configureHistogramDefaultAggregation( Map properties) { AtomicReference aggregationRef = new AtomicReference<>(); - OtlpConfigUtil.configureOtlpHistogramDefaultAggregation( - DefaultConfigProperties.createFromMap(properties), aggregationRef::set); + ConfigProperties config = DefaultConfigProperties.createFromMap(properties); + ExporterBuilderUtil.configureOtlpHistogramDefaultAggregation(config, aggregationRef::set); // We apply the temporality selector to a HISTOGRAM instrument to simplify assertions return aggregationRef.get(); } diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterProviderTest.java b/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterProviderTest.java index 8bfb90e1714..47ed7859cd7 100644 --- a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterProviderTest.java +++ b/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpLogRecordExporterProviderTest.java @@ -20,6 +20,7 @@ import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.logs.export.LogRecordExporter; import java.io.IOException; import java.nio.file.Files; @@ -29,6 +30,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import org.assertj.core.api.AbstractObjectAssert; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -126,11 +128,16 @@ void createExporter_GrpcDefaults() { verify(grpcBuilder, never()).setTimeout(any()); verify(grpcBuilder, never()).setTrustedCertificates(any()); verify(grpcBuilder, never()).setClientTls(any(), any()); - assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNull(); + assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + getMemoryMode(exporter).isEqualTo(MemoryMode.REUSABLE_DATA); } Mockito.verifyNoInteractions(httpBuilder); } + private static AbstractObjectAssert getMemoryMode(LogRecordExporter exporter) { + return 
assertThat(exporter).extracting("marshaler").extracting("memoryMode"); + } + @Test void createExporter_GrpcWithGeneralConfiguration() throws CertificateEncodingException { Map config = new HashMap<>(); @@ -141,7 +148,7 @@ void createExporter_GrpcWithGeneralConfiguration() throws CertificateEncodingExc config.put("otel.exporter.otlp.headers", "header-key=header-value"); config.put("otel.exporter.otlp.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "15s"); - config.put("otel.experimental.exporter.otlp.retry.enabled", "true"); + config.put("otel.java.exporter.otlp.retry.disabled", "true"); try (LogRecordExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -154,7 +161,7 @@ void createExporter_GrpcWithGeneralConfiguration() throws CertificateEncodingExc verify(grpcBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(grpcBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); - assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNull(); } Mockito.verifyNoInteractions(httpBuilder); } @@ -176,6 +183,7 @@ void createExporter_GrpcWithSignalConfiguration() throws CertificateEncodingExce config.put("otel.exporter.otlp.logs.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "1s"); config.put("otel.exporter.otlp.logs.timeout", "15s"); + config.put("otel.java.exporter.memory_mode", "immutable_data"); try (LogRecordExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -188,6 +196,7 @@ void createExporter_GrpcWithSignalConfiguration() throws CertificateEncodingExce verify(grpcBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(grpcBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); + getMemoryMode(exporter).isEqualTo(MemoryMode.IMMUTABLE_DATA); } Mockito.verifyNoInteractions(httpBuilder); } @@ -206,7 +215,8 @@ void createExporter_HttpDefaults() { verify(httpBuilder, never()).setTimeout(any()); verify(httpBuilder, never()).setTrustedCertificates(any()); verify(httpBuilder, never()).setClientTls(any(), any()); - assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNull(); + assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + getMemoryMode(exporter).isEqualTo(MemoryMode.REUSABLE_DATA); } Mockito.verifyNoInteractions(grpcBuilder); } @@ -222,7 +232,7 @@ void createExporter_HttpWithGeneralConfiguration() throws CertificateEncodingExc config.put("otel.exporter.otlp.headers", "header-key=header-value"); config.put("otel.exporter.otlp.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "15s"); - config.put("otel.experimental.exporter.otlp.retry.enabled", "true"); + config.put("otel.java.exporter.otlp.retry.disabled", "true"); try (LogRecordExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -235,7 +245,7 @@ void createExporter_HttpWithGeneralConfiguration() throws CertificateEncodingExc verify(httpBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(httpBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); - assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + 
assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNull(); } Mockito.verifyNoInteractions(grpcBuilder); } @@ -259,6 +269,7 @@ void createExporter_HttpWithSignalConfiguration() throws CertificateEncodingExce config.put("otel.exporter.otlp.logs.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "1s"); config.put("otel.exporter.otlp.logs.timeout", "15s"); + config.put("otel.java.exporter.memory_mode", "immutable_data"); try (LogRecordExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -271,6 +282,7 @@ void createExporter_HttpWithSignalConfiguration() throws CertificateEncodingExce verify(httpBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(httpBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); + getMemoryMode(exporter).isEqualTo(MemoryMode.IMMUTABLE_DATA); } Mockito.verifyNoInteractions(grpcBuilder); } diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterProviderTest.java b/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterProviderTest.java index f1ff7a71f02..3d0a4098328 100644 --- a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterProviderTest.java +++ b/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpMetricExporterProviderTest.java @@ -20,6 +20,7 @@ import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.io.IOException; import java.nio.file.Files; @@ -126,7 +127,8 @@ void createExporter_GrpcDefaults() { verify(grpcBuilder, never()).setTimeout(any()); verify(grpcBuilder, never()).setTrustedCertificates(any()); verify(grpcBuilder, never()).setClientTls(any(), any()); - assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNull(); + assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + assertThat(exporter.getMemoryMode()).isEqualTo(MemoryMode.REUSABLE_DATA); } Mockito.verifyNoInteractions(httpBuilder); } @@ -141,7 +143,7 @@ void createExporter_GrpcWithGeneralConfiguration() throws CertificateEncodingExc config.put("otel.exporter.otlp.headers", "header-key=header-value"); config.put("otel.exporter.otlp.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "15s"); - config.put("otel.experimental.exporter.otlp.retry.enabled", "true"); + config.put("otel.java.exporter.otlp.retry.disabled", "true"); try (MetricExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -154,7 +156,7 @@ void createExporter_GrpcWithGeneralConfiguration() throws CertificateEncodingExc verify(grpcBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(grpcBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); - assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNull(); } Mockito.verifyNoInteractions(httpBuilder); } @@ -176,6 +178,7 @@ void createExporter_GrpcWithSignalConfiguration() throws CertificateEncodingExce 
config.put("otel.exporter.otlp.metrics.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "1s"); config.put("otel.exporter.otlp.metrics.timeout", "15s"); + config.put("otel.java.exporter.memory_mode", "immutable_data"); try (MetricExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -188,6 +191,7 @@ void createExporter_GrpcWithSignalConfiguration() throws CertificateEncodingExce verify(grpcBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(grpcBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); + assertThat(exporter.getMemoryMode()).isEqualTo(MemoryMode.IMMUTABLE_DATA); } Mockito.verifyNoInteractions(httpBuilder); } @@ -207,7 +211,8 @@ void createExporter_HttpDefaults() { verify(httpBuilder, never()).setTimeout(any()); verify(httpBuilder, never()).setTrustedCertificates(any()); verify(httpBuilder, never()).setClientTls(any(), any()); - assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNull(); + assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + assertThat(exporter.getMemoryMode()).isEqualTo(MemoryMode.REUSABLE_DATA); } Mockito.verifyNoInteractions(grpcBuilder); } @@ -223,7 +228,7 @@ void createExporter_HttpWithGeneralConfiguration() throws CertificateEncodingExc config.put("otel.exporter.otlp.headers", "header-key=header-value"); config.put("otel.exporter.otlp.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "15s"); - config.put("otel.experimental.exporter.otlp.retry.enabled", "true"); + config.put("otel.java.exporter.otlp.retry.disabled", "true"); try (MetricExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -236,7 +241,7 @@ void createExporter_HttpWithGeneralConfiguration() throws CertificateEncodingExc verify(httpBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(httpBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); - assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNull(); } Mockito.verifyNoInteractions(grpcBuilder); } @@ -260,6 +265,7 @@ void createExporter_HttpWithSignalConfiguration() throws CertificateEncodingExce config.put("otel.exporter.otlp.metrics.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "1s"); config.put("otel.exporter.otlp.metrics.timeout", "15s"); + config.put("otel.java.exporter.memory_mode", "immutable_data"); try (MetricExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -272,6 +278,7 @@ void createExporter_HttpWithSignalConfiguration() throws CertificateEncodingExce verify(httpBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(httpBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); + assertThat(exporter.getMemoryMode()).isEqualTo(MemoryMode.IMMUTABLE_DATA); } Mockito.verifyNoInteractions(grpcBuilder); } diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterProviderTest.java b/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterProviderTest.java index 7d83c3104f8..46d2e8ea218 100644 --- a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterProviderTest.java +++ 
b/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/internal/OtlpSpanExporterProviderTest.java @@ -20,6 +20,7 @@ import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.trace.export.SpanExporter; import java.io.IOException; import java.nio.file.Files; @@ -29,6 +30,8 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import org.assertj.core.api.AbstractObjectAssert; +import org.assertj.core.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -126,11 +129,16 @@ void createExporter_GrpcDefaults() { verify(grpcBuilder, never()).setTimeout(any()); verify(grpcBuilder, never()).setTrustedCertificates(any()); verify(grpcBuilder, never()).setClientTls(any(), any()); - assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNull(); + assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + getMemoryMode(exporter).isEqualTo(MemoryMode.REUSABLE_DATA); } Mockito.verifyNoInteractions(httpBuilder); } + private static AbstractObjectAssert getMemoryMode(SpanExporter exporter) { + return assertThat(exporter).extracting("marshaler").extracting("memoryMode"); + } + @Test void createExporter_GrpcWithGeneralConfiguration() throws CertificateEncodingException { Map config = new HashMap<>(); @@ -141,7 +149,7 @@ void createExporter_GrpcWithGeneralConfiguration() throws CertificateEncodingExc config.put("otel.exporter.otlp.headers", "header-key=header-value"); config.put("otel.exporter.otlp.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "15s"); - config.put("otel.experimental.exporter.otlp.retry.enabled", "true"); + config.put("otel.java.exporter.otlp.retry.disabled", "true"); try (SpanExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -154,7 +162,7 @@ void createExporter_GrpcWithGeneralConfiguration() throws CertificateEncodingExc verify(grpcBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(grpcBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); - assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + assertThat(grpcBuilder).extracting("delegate").extracting("retryPolicy").isNull(); } Mockito.verifyNoInteractions(httpBuilder); } @@ -176,6 +184,7 @@ void createExporter_GrpcWithSignalConfiguration() throws CertificateEncodingExce config.put("otel.exporter.otlp.traces.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "1s"); config.put("otel.exporter.otlp.traces.timeout", "15s"); + config.put("otel.java.exporter.memory_mode", "immutable_data"); try (SpanExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -188,6 +197,7 @@ void createExporter_GrpcWithSignalConfiguration() throws CertificateEncodingExce verify(grpcBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(grpcBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); + getMemoryMode(exporter).isEqualTo(MemoryMode.IMMUTABLE_DATA); } Mockito.verifyNoInteractions(httpBuilder); } @@ -206,7 +216,8 @@ void createExporter_HttpDefaults() { 
verify(httpBuilder, never()).setTimeout(any()); verify(httpBuilder, never()).setTrustedCertificates(any()); verify(httpBuilder, never()).setClientTls(any(), any()); - assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNull(); + assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + getMemoryMode(exporter).isEqualTo(MemoryMode.REUSABLE_DATA); } Mockito.verifyNoInteractions(grpcBuilder); } @@ -219,23 +230,26 @@ void createExporter_HttpWithGeneralConfiguration() throws CertificateEncodingExc config.put("otel.exporter.otlp.certificate", certificatePath); config.put("otel.exporter.otlp.client.key", clientKeyPath); config.put("otel.exporter.otlp.client.certificate", clientCertificatePath); - config.put("otel.exporter.otlp.headers", "header-key=header-value"); + config.put( + "otel.exporter.otlp.headers", "header-key1=header%20value1,header-key2=header value2"); config.put("otel.exporter.otlp.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "15s"); - config.put("otel.experimental.exporter.otlp.retry.enabled", "true"); + config.put("otel.java.exporter.otlp.retry.disabled", "true"); try (SpanExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { assertThat(exporter).isInstanceOf(OtlpHttpSpanExporter.class); verify(httpBuilder, times(1)).build(); verify(httpBuilder).setEndpoint("https://localhost:443/v1/traces"); - verify(httpBuilder).addHeader("header-key", "header-value"); + verify(httpBuilder).addHeader("header-key1", "header value1"); + verify(httpBuilder).addHeader("header-key2", "header value2"); verify(httpBuilder).setCompression("gzip"); verify(httpBuilder).setTimeout(Duration.ofSeconds(15)); verify(httpBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(httpBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); - assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNotNull(); + assertThat(httpBuilder).extracting("delegate").extracting("retryPolicy").isNull(); + getMemoryMode(exporter).isEqualTo(MemoryMode.REUSABLE_DATA); } Mockito.verifyNoInteractions(grpcBuilder); } @@ -259,6 +273,7 @@ void createExporter_HttpWithSignalConfiguration() throws CertificateEncodingExce config.put("otel.exporter.otlp.traces.compression", "gzip"); config.put("otel.exporter.otlp.timeout", "1s"); config.put("otel.exporter.otlp.traces.timeout", "15s"); + config.put("otel.java.exporter.memory_mode", "immutable_data"); try (SpanExporter exporter = provider.createExporter(DefaultConfigProperties.createFromMap(config))) { @@ -271,7 +286,20 @@ void createExporter_HttpWithSignalConfiguration() throws CertificateEncodingExce verify(httpBuilder).setTrustedCertificates(serverTls.certificate().getEncoded()); verify(httpBuilder) .setClientTls(clientTls.privateKey().getEncoded(), clientTls.certificate().getEncoded()); + getMemoryMode(exporter).isEqualTo(MemoryMode.IMMUTABLE_DATA); } Mockito.verifyNoInteractions(grpcBuilder); } + + @Test + void createExporter_decodingError() { + Assertions.assertThatThrownBy( + () -> { + provider.createExporter( + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.exporter.otlp.headers", "header-key=%-1"))); + }) + .isInstanceOf(ConfigurationException.class) + .hasMessage("Cannot decode header value: %-1"); + } } diff --git a/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/OkHttpVersionTest.java 
b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/OkHttpVersionTest.java new file mode 100644 index 00000000000..8798781bb3f --- /dev/null +++ b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/OkHttpVersionTest.java @@ -0,0 +1,27 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assumptions.assumeThat; + +import java.util.logging.Level; +import java.util.logging.Logger; +import okhttp3.OkHttp; +import org.junit.jupiter.api.Test; + +class OkHttpVersionTest { + + private static final Logger LOGGER = Logger.getLogger(OkHttpVersionTest.class.getName()); + + @Test + void expectedOkHttpVersion() { + String expectedVersion = System.getProperty("expected.okhttp.version"); + LOGGER.log(Level.WARNING, "Testing okhttp version " + expectedVersion); + assumeThat(expectedVersion.equals("LATEST")).isFalse(); + assertThat(OkHttp.VERSION).isEqualTo(expectedVersion); + } +} diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporterOkHttpSenderTest.java b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporterOkHttpSenderTest.java similarity index 100% rename from exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporterOkHttpSenderTest.java rename to exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/http/logs/OtlpHttpLogRecordExporterOkHttpSenderTest.java diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterOkHttpSenderTest.java b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterOkHttpSenderTest.java similarity index 75% rename from exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterOkHttpSenderTest.java rename to exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterOkHttpSenderTest.java index 052e4c9c8b2..eee09f314c5 100644 --- a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterOkHttpSenderTest.java +++ b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterOkHttpSenderTest.java @@ -23,7 +23,9 @@ import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.util.List; +import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; class OtlpHttpMetricExporterOkHttpSenderTest @@ -77,6 +79,34 @@ void invalidMetricConfig() { .hasMessage("defaultAggregationSelector"); } + /** Test configuration specific to metric exporter. 
*/ + @Test + void stringRepresentation() { + try (MetricExporter metricExporter = OtlpHttpMetricExporter.builder().build()) { + assertThat(metricExporter.toString()) + .matches( + "OtlpHttpMetricExporter\\{" + + "exporterName=otlp, " + + "type=metric, " + + "endpoint=http://localhost:4318/v1/metrics, " + + "timeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + + "proxyOptions=null, " + + "compressorEncoding=null, " + + "connectTimeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + + "exportAsJson=false, " + + "headers=Headers\\{User-Agent=OBFUSCATED\\}, " + + "retryPolicy=RetryPolicy\\{.*\\}, " + + "aggregationTemporalitySelector=AggregationTemporalitySelector\\{.*\\}, " + + "defaultAggregationSelector=DefaultAggregationSelector\\{.*\\}, " + + "memoryMode=IMMUTABLE_DATA" + + "\\}"); + } + } + @Override protected TelemetryExporterBuilder exporterBuilder() { return new HttpMetricExporterBuilderWrapper(OtlpHttpMetricExporter.builder()); diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterOkHttpSenderTest.java b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterOkHttpSenderTest.java similarity index 59% rename from exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterOkHttpSenderTest.java rename to exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterOkHttpSenderTest.java index 4752832bbc0..da42bed03d8 100644 --- a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterOkHttpSenderTest.java +++ b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterOkHttpSenderTest.java @@ -5,6 +5,8 @@ package io.opentelemetry.exporter.otlp.http.trace; +import static org.assertj.core.api.Assertions.assertThat; + import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.exporter.internal.otlp.traces.ResourceSpansMarshaler; import io.opentelemetry.exporter.otlp.testing.internal.AbstractHttpTelemetryExporterTest; @@ -14,7 +16,10 @@ import io.opentelemetry.exporter.otlp.testing.internal.TelemetryExporterBuilder; import io.opentelemetry.proto.trace.v1.ResourceSpans; import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SpanExporter; import java.util.List; +import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.Test; class OtlpHttpSpanExporterOkHttpSenderTest extends AbstractHttpTelemetryExporterTest { @@ -23,6 +28,32 @@ protected OtlpHttpSpanExporterOkHttpSenderTest() { super("span", "/v1/traces", ResourceSpans.getDefaultInstance()); } + /** Test configuration specific to span exporter. 
*/ + @Test + void stringRepresentation() { + try (SpanExporter spanExporter = OtlpHttpSpanExporter.builder().build()) { + assertThat(spanExporter.toString()) + .matches( + "OtlpHttpSpanExporter\\{" + + "exporterName=otlp, " + + "type=span, " + + "endpoint=http://localhost:4318/v1/traces, " + + "timeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + + "proxyOptions=null, " + + "compressorEncoding=null, " + + "connectTimeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + + "exportAsJson=false, " + + "headers=Headers\\{User-Agent=OBFUSCATED\\}, " + + "retryPolicy=RetryPolicy\\{.*\\}, " + + "memoryMode=IMMUTABLE_DATA" + + "\\}"); + } + } + @Override protected TelemetryExporterBuilder exporterBuilder() { return new HttpSpanExporterBuilderWrapper(OtlpHttpSpanExporter.builder()); diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporterTest.java b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporterTest.java similarity index 100% rename from exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporterTest.java rename to exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/logs/OtlpGrpcLogRecordExporterTest.java diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterTest.java b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterTest.java similarity index 77% rename from exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterTest.java rename to exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterTest.java index 65cbf9320ce..6e57cabd41c 100644 --- a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterTest.java +++ b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/metrics/OtlpGrpcMetricExporterTest.java @@ -23,8 +23,10 @@ import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.io.Closeable; import java.util.List; +import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; class OtlpGrpcMetricExporterTest @@ -78,6 +80,33 @@ void invalidMetricConfig() { .hasMessage("defaultAggregationSelector"); } + /** Test configuration specific to metric exporter. 
*/ + @Test + void stringRepresentation() { + try (MetricExporter metricExporter = OtlpGrpcMetricExporter.builder().build()) { + assertThat(metricExporter.toString()) + .matches( + "OtlpGrpcMetricExporter\\{" + + "exporterName=otlp, " + + "type=metric, " + + "endpoint=http://localhost:4317, " + + "endpointPath=.*, " + + "timeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + + "connectTimeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + + "compressorEncoding=null, " + + "headers=Headers\\{User-Agent=OBFUSCATED\\}, " + + "retryPolicy=RetryPolicy\\{.*\\}, " + + "aggregationTemporalitySelector=AggregationTemporalitySelector\\{.*\\}, " + + "defaultAggregationSelector=DefaultAggregationSelector\\{.*\\}, " + + "memoryMode=IMMUTABLE_DATA" + + "\\}"); + } + } + @Test void usingOkHttp() throws Exception { try (Closeable exporter = OtlpGrpcMetricExporter.builder().build()) { diff --git a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporterTest.java b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/traces/OtlpGrpcSpanExporterTest.java similarity index 62% rename from exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporterTest.java rename to exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/traces/OtlpGrpcSpanExporterTest.java index 9017cd4584e..e54b7279cb5 100644 --- a/exporters/otlp/all/src/test/java/io/opentelemetry/exporter/otlp/trace/OtlpGrpcSpanExporterTest.java +++ b/exporters/otlp/all/src/testDefaultSender/java/io/opentelemetry/exporter/otlp/traces/OtlpGrpcSpanExporterTest.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.exporter.otlp.trace; +package io.opentelemetry.exporter.otlp.traces; import static org.assertj.core.api.Assertions.assertThat; @@ -13,11 +13,14 @@ import io.opentelemetry.exporter.otlp.testing.internal.FakeTelemetryUtil; import io.opentelemetry.exporter.otlp.testing.internal.TelemetryExporter; import io.opentelemetry.exporter.otlp.testing.internal.TelemetryExporterBuilder; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.exporter.sender.okhttp.internal.OkHttpGrpcSender; import io.opentelemetry.proto.trace.v1.ResourceSpans; import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SpanExporter; import java.io.Closeable; import java.util.List; +import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; class OtlpGrpcSpanExporterTest extends AbstractGrpcTelemetryExporterTest { @@ -26,6 +29,31 @@ class OtlpGrpcSpanExporterTest extends AbstractGrpcTelemetryExporterTest exporterBuilder() { return new HttpLogRecordExporterBuilderWrapper(OtlpHttpLogRecordExporter.builder()); diff --git a/exporters/otlp/all/src/testJdkHttpSender/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterJdkSenderTest.java b/exporters/otlp/all/src/testJdkHttpSender/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterJdkSenderTest.java index 5418acea369..88f7b6cdfe3 100644 --- a/exporters/otlp/all/src/testJdkHttpSender/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterJdkSenderTest.java +++ b/exporters/otlp/all/src/testJdkHttpSender/java/io/opentelemetry/exporter/otlp/http/metrics/OtlpHttpMetricExporterJdkSenderTest.java @@ -24,7 +24,9 @@ import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; import 
io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.util.List; +import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; class OtlpHttpMetricExporterJdkSenderTest @@ -78,9 +80,32 @@ void invalidMetricConfig() { .hasMessage("defaultAggregationSelector"); } - @Override - protected boolean hasAuthenticatorSupport() { - return false; + /** Test configuration specific to metric exporter. */ + @Test + void stringRepresentation() { + try (MetricExporter metricExporter = OtlpHttpMetricExporter.builder().build()) { + assertThat(metricExporter.toString()) + .matches( + "OtlpHttpMetricExporter\\{" + + "exporterName=otlp, " + + "type=metric, " + + "endpoint=http://localhost:4318/v1/metrics, " + + "timeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + + "proxyOptions=null, " + + "compressorEncoding=null, " + + "connectTimeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + + "exportAsJson=false, " + + "headers=Headers\\{User-Agent=OBFUSCATED\\}, " + + "retryPolicy=RetryPolicy\\{.*\\}, " + + "aggregationTemporalitySelector=AggregationTemporalitySelector\\{.*\\}, " + + "defaultAggregationSelector=DefaultAggregationSelector\\{.*\\}, " + + "memoryMode=IMMUTABLE_DATA" + + "\\}"); + } } @Override diff --git a/exporters/otlp/all/src/testJdkHttpSender/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterJdkSenderTest.java b/exporters/otlp/all/src/testJdkHttpSender/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterJdkSenderTest.java index 28a451addb4..4b546d1d796 100644 --- a/exporters/otlp/all/src/testJdkHttpSender/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterJdkSenderTest.java +++ b/exporters/otlp/all/src/testJdkHttpSender/java/io/opentelemetry/exporter/otlp/http/trace/OtlpHttpSpanExporterJdkSenderTest.java @@ -27,11 +27,6 @@ protected OtlpHttpSpanExporterJdkSenderTest() { super("span", "/v1/traces", ResourceSpans.getDefaultInstance()); } - @Override - protected boolean hasAuthenticatorSupport() { - return false; - } - @Override protected TelemetryExporterBuilder exporterBuilder() { return new HttpSpanExporterBuilderWrapper(OtlpHttpSpanExporter.builder()); diff --git a/exporters/otlp/all/src/testSpanPipeline/java/io/opentelemetry/exporter/otlp/trace/SpanPipelineOtlpBenchmark.java b/exporters/otlp/all/src/testSpanPipeline/java/io/opentelemetry/exporter/otlp/trace/SpanPipelineOtlpBenchmark.java index 3fc1f5f9a5a..a5536b31c3b 100644 --- a/exporters/otlp/all/src/testSpanPipeline/java/io/opentelemetry/exporter/otlp/trace/SpanPipelineOtlpBenchmark.java +++ b/exporters/otlp/all/src/testSpanPipeline/java/io/opentelemetry/exporter/otlp/trace/SpanPipelineOtlpBenchmark.java @@ -23,8 +23,8 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -70,11 +70,11 @@ protected CompletionStage handleMessage( } }; - private static SdkTracerProvider tracerProvider; - private static Tracer tracer; + private SdkTracerProvider tracerProvider; + private Tracer tracer; - @BeforeAll - public static void setUp() { + @BeforeEach + public void setUp() { tracerProvider = SdkTracerProvider.builder() .setResource(RESOURCE) @@ -89,12 +89,12 @@ public 
static void setUp() { tracer = tracerProvider.get("benchmark"); } - @AfterAll - public static void tearDown() { + @AfterEach + public void tearDown() { tracerProvider.close(); } - private static void createSpan() { + private void createSpan() { Span span = tracer.spanBuilder("POST /search").startSpan(); try (Scope ignored = span.makeCurrent()) { span.setAllAttributes(SPAN_ATTRIBUTES); @@ -109,10 +109,10 @@ void runPipeline() { long endTimeNanos = startTimeNanos + TimeUnit.SECONDS.toNanos(60); try { while (System.nanoTime() < endTimeNanos) { - SpanPipelineOtlpBenchmark.createSpan(); + createSpan(); } } finally { - SpanPipelineOtlpBenchmark.tearDown(); + tearDown(); } } } diff --git a/exporters/otlp/common/build.gradle.kts b/exporters/otlp/common/build.gradle.kts index 35b7a1b2eb7..94d50e46fd0 100644 --- a/exporters/otlp/common/build.gradle.kts +++ b/exporters/otlp/common/build.gradle.kts @@ -15,6 +15,8 @@ val versions: Map by project dependencies { protoSource("io.opentelemetry.proto:opentelemetry-proto:${versions["io.opentelemetry.proto"]}") + annotationProcessor("com.google.auto.value:auto-value") + api(project(":exporters:common")) compileOnly(project(":sdk:metrics")) @@ -28,8 +30,10 @@ dependencies { testImplementation("com.fasterxml.jackson.core:jackson-databind") testImplementation("com.google.protobuf:protobuf-java-util") + testImplementation("com.google.guava:guava") testImplementation("io.opentelemetry.proto:opentelemetry-proto") + jmhImplementation(project(":api:incubator")) jmhImplementation(project(":sdk:testing")) jmhImplementation("com.fasterxml.jackson.core:jackson-core") jmhImplementation("io.opentelemetry.proto:opentelemetry-proto") @@ -41,6 +45,7 @@ wire { "opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest", "opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest", "opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest", + "opentelemetry.proto.collector.profiles.v1development.ExportProfilesServiceRequest" ) custom { diff --git a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/GrpcGzipBenchmark.java b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/GrpcGzipBenchmark.java index 5bbc501700c..21a98f5546c 100644 --- a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/GrpcGzipBenchmark.java +++ b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/GrpcGzipBenchmark.java @@ -141,8 +141,8 @@ public class GrpcGzipBenchmark { } @Benchmark - public ByteArrayOutputStream gzipCompressor() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); + public TestOutputStream gzipCompressor() throws IOException { + TestOutputStream baos = new TestOutputStream(); OutputStream gzos = GZIP_CODEC.compress(baos); METRICS_REQUEST.writeTo(gzos); gzos.close(); @@ -150,8 +150,8 @@ public ByteArrayOutputStream gzipCompressor() throws IOException { } @Benchmark - public ByteArrayOutputStream identityCompressor() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); + public TestOutputStream identityCompressor() throws IOException { + TestOutputStream baos = new TestOutputStream(); OutputStream gzos = IDENTITY_CODEC.compress(baos); METRICS_REQUEST.writeTo(gzos); gzos.close(); diff --git a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/LogsRequestMarshalerBenchmark.java b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/LogsRequestMarshalerBenchmark.java new file 
mode 100644 index 00000000000..09bc21576c2 --- /dev/null +++ b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/LogsRequestMarshalerBenchmark.java @@ -0,0 +1,146 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.incubator.logs.ExtendedLogger; +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.exporter.internal.otlp.logs.LogsRequestMarshaler; +import io.opentelemetry.exporter.internal.otlp.logs.LowAllocationLogsRequestMarshaler; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.concurrent.TimeUnit; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Warmup; + +@BenchmarkMode({Mode.AverageTime}) +@OutputTimeUnit(TimeUnit.MICROSECONDS) +@Warmup(iterations = 5, time = 1) +@Measurement(iterations = 10, time = 1) +@Fork(1) +public class LogsRequestMarshalerBenchmark { + + private static final Collection LOGS; + private static final LowAllocationLogsRequestMarshaler MARSHALER = + new LowAllocationLogsRequestMarshaler(); + private static final TestOutputStream OUTPUT = new TestOutputStream(); + + static { + InMemoryLogRecordExporter logRecordExporter = InMemoryLogRecordExporter.create(); + SdkLoggerProvider loggerProvider = + SdkLoggerProvider.builder() + .setResource( + Resource.create( + Attributes.builder() + .put(AttributeKey.booleanKey("key_bool"), true) + .put(AttributeKey.stringKey("key_string"), "string") + .put(AttributeKey.longKey("key_int"), 100L) + .put(AttributeKey.doubleKey("key_double"), 100.3) + .put( + AttributeKey.stringArrayKey("key_string_array"), + Arrays.asList("string", "string")) + .put(AttributeKey.longArrayKey("key_long_array"), Arrays.asList(12L, 23L)) + .put( + AttributeKey.doubleArrayKey("key_double_array"), + Arrays.asList(12.3, 23.1)) + .put( + AttributeKey.booleanArrayKey("key_boolean_array"), + Arrays.asList(true, false)) + .build())) + .addLogRecordProcessor(SimpleLogRecordProcessor.create(logRecordExporter)) + .build(); + + Logger logger1 = loggerProvider.get("logger"); + logger1 + .logRecordBuilder() + .setBody("Hello world from this log...") + .setAllAttributes( + Attributes.builder() + .put("key_bool", true) + .put("key_String", "string") + .put("key_int", 100L) + .put("key_double", 100.3) + .build()) + .setSeverity(Severity.INFO) + .setSeverityText("INFO") + .emit(); + + ((ExtendedLogger) logger1) + .logRecordBuilder() + .setEventName("namespace.my-event-name") + .setBody( + Value.of( + KeyValue.of("stringKey", Value.of("value")), + KeyValue.of("longKey", Value.of(1)), + KeyValue.of("doubleKey", Value.of(1.0)), + 
KeyValue.of("boolKey", Value.of(true)), + KeyValue.of("stringArrKey", Value.of(Value.of("value1"), Value.of("value2"))), + KeyValue.of("longArrKey", Value.of(Value.of(1), Value.of(2))), + KeyValue.of("doubleArrKey", Value.of(Value.of(1.0), Value.of(2.0))), + KeyValue.of("boolArrKey", Value.of(Value.of(true), Value.of(false))), + KeyValue.of( + "key", Value.of(Collections.singletonMap("childKey1", Value.of("value")))))) + .emit(); + + LOGS = logRecordExporter.getFinishedLogRecordItems(); + } + + @Benchmark + public int marshalStateful() throws IOException { + LogsRequestMarshaler marshaler = LogsRequestMarshaler.create(LOGS); + OUTPUT.reset(); + marshaler.writeBinaryTo(OUTPUT); + return OUTPUT.getCount(); + } + + @Benchmark + public int marshalStatefulJson() throws IOException { + LogsRequestMarshaler marshaler = LogsRequestMarshaler.create(LOGS); + OUTPUT.reset(); + marshaler.writeJsonTo(OUTPUT); + return OUTPUT.getCount(); + } + + @Benchmark + public int marshalStateless() throws IOException { + MARSHALER.initialize(LOGS); + try { + OUTPUT.reset(); + MARSHALER.writeBinaryTo(OUTPUT); + return OUTPUT.getCount(); + } finally { + MARSHALER.reset(); + } + } + + @Benchmark + public int marshalStatelessJson() throws IOException { + MARSHALER.initialize(LOGS); + try { + OUTPUT.reset(); + MARSHALER.writeJsonTo(OUTPUT); + return OUTPUT.getCount(); + } finally { + MARSHALER.reset(); + } + } +} diff --git a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/MetricsRequestMarshalerBenchmark.java b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/MetricsRequestMarshalerBenchmark.java index c15a789809f..95d652c3533 100644 --- a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/MetricsRequestMarshalerBenchmark.java +++ b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/MetricsRequestMarshalerBenchmark.java @@ -13,12 +13,12 @@ import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.exporter.internal.otlp.metrics.LowAllocationMetricsRequestMarshaler; import io.opentelemetry.exporter.internal.otlp.metrics.MetricsRequestMarshaler; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Arrays; import java.util.Collection; @@ -39,6 +39,9 @@ public class MetricsRequestMarshalerBenchmark { private static final Collection METRICS; + private static final LowAllocationMetricsRequestMarshaler MARSHALER = + new LowAllocationMetricsRequestMarshaler(); + private static final TestOutputStream OUTPUT = new TestOutputStream(); static { InMemoryMetricReader metricReader = InMemoryMetricReader.create(); @@ -117,10 +120,42 @@ public class MetricsRequestMarshalerBenchmark { } @Benchmark - public ByteArrayOutputStream marshaler() throws IOException { + public int marshalStateful() throws IOException { MetricsRequestMarshaler marshaler = MetricsRequestMarshaler.create(METRICS); - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - marshaler.writeBinaryTo(bos); - return bos; + OUTPUT.reset(); + marshaler.writeBinaryTo(OUTPUT); + return OUTPUT.getCount(); + } + + @Benchmark + public int marshalStatefulJson() throws IOException { + MetricsRequestMarshaler marshaler 
= MetricsRequestMarshaler.create(METRICS); + OUTPUT.reset(); + marshaler.writeJsonTo(OUTPUT); + return OUTPUT.getCount(); + } + + @Benchmark + public int marshalStateless() throws IOException { + MARSHALER.initialize(METRICS); + try { + OUTPUT.reset(); + MARSHALER.writeBinaryTo(OUTPUT); + return OUTPUT.getCount(); + } finally { + MARSHALER.reset(); + } + } + + @Benchmark + public int marshalStatelessJson() throws IOException { + MARSHALER.initialize(METRICS); + try { + OUTPUT.reset(); + MARSHALER.writeJsonTo(OUTPUT); + return OUTPUT.getCount(); + } finally { + MARSHALER.reset(); + } } } diff --git a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/RequestMarshalBenchmarks.java b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/RequestMarshalBenchmarks.java index b84b5ccca0e..fbdfa290011 100644 --- a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/RequestMarshalBenchmarks.java +++ b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/RequestMarshalBenchmarks.java @@ -5,8 +5,8 @@ package io.opentelemetry.exporter.internal.otlp; +import io.opentelemetry.exporter.internal.otlp.traces.LowAllocationTraceRequestMarshaler; import io.opentelemetry.exporter.internal.otlp.traces.TraceRequestMarshaler; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.concurrent.TimeUnit; import org.openjdk.jmh.annotations.Benchmark; @@ -25,29 +25,72 @@ @Fork(1) public class RequestMarshalBenchmarks { + private static final LowAllocationTraceRequestMarshaler MARSHALER = + new LowAllocationTraceRequestMarshaler(); + private static final TestOutputStream OUTPUT = new TestOutputStream(); + @Benchmark @Threads(1) - public ByteArrayOutputStream createCustomMarshal(RequestMarshalState state) { + public int createStatefulMarshaler(RequestMarshalState state) { TraceRequestMarshaler requestMarshaler = TraceRequestMarshaler.create(state.spanDataList); - return new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + return requestMarshaler.getBinarySerializedSize(); } @Benchmark @Threads(1) - public ByteArrayOutputStream marshalCustom(RequestMarshalState state) throws IOException { + public int marshalStatefulBinary(RequestMarshalState state) throws IOException { TraceRequestMarshaler requestMarshaler = TraceRequestMarshaler.create(state.spanDataList); - ByteArrayOutputStream customOutput = - new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); - requestMarshaler.writeBinaryTo(customOutput); - return customOutput; + OUTPUT.reset(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeBinaryTo(OUTPUT); + return OUTPUT.getCount(); } @Benchmark @Threads(1) - public ByteArrayOutputStream marshalJson(RequestMarshalState state) throws IOException { + public int marshalStatefulJson(RequestMarshalState state) throws IOException { TraceRequestMarshaler requestMarshaler = TraceRequestMarshaler.create(state.spanDataList); - ByteArrayOutputStream customOutput = new ByteArrayOutputStream(); - requestMarshaler.writeJsonTo(customOutput); - return customOutput; + OUTPUT.reset(); + requestMarshaler.writeJsonTo(OUTPUT); + return OUTPUT.getCount(); + } + + @Benchmark + @Threads(1) + public int createStatelessMarshaler(RequestMarshalState state) { + LowAllocationTraceRequestMarshaler requestMarshaler = MARSHALER; + requestMarshaler.initialize(state.spanDataList); + try { + return requestMarshaler.getBinarySerializedSize(); + } finally { + requestMarshaler.reset(); + } 
+ } + + @Benchmark + @Threads(1) + public int marshalStatelessBinary(RequestMarshalState state) throws IOException { + LowAllocationTraceRequestMarshaler requestMarshaler = MARSHALER; + requestMarshaler.initialize(state.spanDataList); + try { + OUTPUT.reset(); + requestMarshaler.writeBinaryTo(OUTPUT); + return OUTPUT.getCount(); + } finally { + requestMarshaler.reset(); + } + } + + @Benchmark + @Threads(1) + public int marshalStatelessJson(RequestMarshalState state) throws IOException { + LowAllocationTraceRequestMarshaler requestMarshaler = MARSHALER; + requestMarshaler.initialize(state.spanDataList); + try { + OUTPUT.reset(); + requestMarshaler.writeJsonTo(OUTPUT); + return OUTPUT.getCount(); + } finally { + requestMarshaler.reset(); + } } } diff --git a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/StringMarshalBenchmark.java b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/StringMarshalBenchmark.java new file mode 100644 index 00000000000..724f3976cec --- /dev/null +++ b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/StringMarshalBenchmark.java @@ -0,0 +1,132 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; + +@BenchmarkMode({Mode.AverageTime}) +@OutputTimeUnit(TimeUnit.MICROSECONDS) +@Warmup(iterations = 5, time = 1) +@Measurement(iterations = 10, time = 1) +@Fork(1) +public class StringMarshalBenchmark { + private static final TestMarshaler MARSHALER_SAFE = new TestMarshaler(/* useUnsafe= */ false); + private static final TestMarshaler MARSHALER_UNSAFE = new TestMarshaler(/* useUnsafe= */ true); + private static final TestOutputStream OUTPUT = new TestOutputStream(); + + @Benchmark + @Threads(1) + public int marshalAsciiStringStateful(StringMarshalState state) throws IOException { + return marshalStateful(state.asciiString); + } + + @Benchmark + @Threads(1) + public int marshalLatin1StringStateful(StringMarshalState state) throws IOException { + return marshalStateful(state.latin1String); + } + + @Benchmark + @Threads(1) + public int marshalUnicodeStringStateful(StringMarshalState state) throws IOException { + return marshalStateful(state.unicodeString); + } + + private static int marshalStateful(String string) throws IOException { + OUTPUT.reset(); + Marshaler marshaler = StringAnyValueMarshaler.create(string); + marshaler.writeBinaryTo(OUTPUT); + return OUTPUT.getCount(); + } + + @Benchmark + @Threads(1) + public int marshalAsciiStringStatelessSafe(StringMarshalState state) throws IOException { + return marshalStateless(MARSHALER_SAFE, state.asciiString); + } + + @Benchmark + @Threads(1) + public int marshalAsciiStringStatelessUnsafe(StringMarshalState state) throws IOException { + return marshalStateless(MARSHALER_UNSAFE, state.asciiString); + } + + @Benchmark + @Threads(1) + 
public int marshalLatin1StringStatelessSafe(StringMarshalState state) throws IOException { + return marshalStateless(MARSHALER_SAFE, state.latin1String); + } + + @Benchmark + @Threads(1) + public int marshalLatin1StringStatelessUnsafe(StringMarshalState state) throws IOException { + return marshalStateless(MARSHALER_UNSAFE, state.latin1String); + } + + @Benchmark + @Threads(1) + public int marshalUnicodeStringStatelessSafe(StringMarshalState state) throws IOException { + return marshalStateless(MARSHALER_SAFE, state.unicodeString); + } + + @Benchmark + @Threads(1) + public int marshalUnicodeStringStatelessUnsafe(StringMarshalState state) throws IOException { + return marshalStateless(MARSHALER_UNSAFE, state.unicodeString); + } + + private static int marshalStateless(TestMarshaler marshaler, String string) throws IOException { + OUTPUT.reset(); + try { + marshaler.initialize(string); + marshaler.writeBinaryTo(OUTPUT); + return OUTPUT.getCount(); + } finally { + marshaler.reset(); + } + } + + private static class TestMarshaler extends Marshaler { + private final MarshalerContext context; + private int size; + private String value; + + TestMarshaler(boolean useUnsafe) { + context = new MarshalerContext(/* marshalStringNoAllocation= */ true, useUnsafe); + } + + public void initialize(String string) { + value = string; + size = StringAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize(string, context); + } + + public void reset() { + context.reset(); + } + + @Override + public int getBinarySerializedSize() { + return size; + } + + @Override + public void writeTo(Serializer output) throws IOException { + StringAnyValueStatelessMarshaler.INSTANCE.writeTo(output, value, context); + } + } +} diff --git a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/StringMarshalState.java b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/StringMarshalState.java new file mode 100644 index 00000000000..4d7e3aacd18 --- /dev/null +++ b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/StringMarshalState.java @@ -0,0 +1,37 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; + +@State(Scope.Benchmark) +public class StringMarshalState { + + @Param("512") + int stringSize; + + String asciiString; + String latin1String; + String unicodeString; + + @Setup + public void setup() { + asciiString = makeString('a', stringSize); + latin1String = makeString('ä', stringSize); + unicodeString = makeString('∆', stringSize); + } + + private static String makeString(char c, int size) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < size; i++) { + sb.append(c); + } + return sb.toString(); + } +} diff --git a/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/TestOutputStream.java b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/TestOutputStream.java new file mode 100644 index 00000000000..cfaba56ec3e --- /dev/null +++ b/exporters/otlp/common/src/jmh/java/io/opentelemetry/exporter/internal/otlp/TestOutputStream.java @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import java.io.OutputStream; + +class 
TestOutputStream extends OutputStream { + private int size; + private int count; + + TestOutputStream() { + this(-1); + } + + TestOutputStream(int size) { + this.size = size; + } + + @Override + public void write(int b) { + count++; + if (size > 0 && count > size) { + throw new IllegalStateException("max size exceeded"); + } + } + + void reset(int size) { + this.size = size; + this.count = 0; + } + + void reset() { + reset(-1); + } + + int getCount() { + return count; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AnyValueMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AnyValueMarshaler.java new file mode 100644 index 00000000000..327ad471e4e --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AnyValueMarshaler.java @@ -0,0 +1,44 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import java.nio.ByteBuffer; +import java.util.List; + +/** + * Utility methods for obtaining AnyValue marshaler. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class AnyValueMarshaler { + + private AnyValueMarshaler() {} + + @SuppressWarnings("unchecked") + public static MarshalerWithSize create(Value value) { + switch (value.getType()) { + case STRING: + return StringAnyValueMarshaler.create((String) value.getValue()); + case BOOLEAN: + return BoolAnyValueMarshaler.create((boolean) value.getValue()); + case LONG: + return IntAnyValueMarshaler.create((long) value.getValue()); + case DOUBLE: + return DoubleAnyValueMarshaler.create((double) value.getValue()); + case ARRAY: + return ArrayAnyValueMarshaler.createAnyValue((List>) value.getValue()); + case KEY_VALUE_LIST: + return KeyValueListAnyValueMarshaler.create((List) value.getValue()); + case BYTES: + return BytesAnyValueMarshaler.create((ByteBuffer) value.getValue()); + } + throw new IllegalArgumentException("Unsupported Value type: " + value.getType()); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AnyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AnyValueStatelessMarshaler.java new file mode 100644 index 00000000000..bad0d9060d5 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AnyValueStatelessMarshaler.java @@ -0,0 +1,110 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.List; + +/** + * A Marshaler of key value pairs. See {@link AnyValueMarshaler}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class AnyValueStatelessMarshaler implements StatelessMarshaler> { + + public static final AnyValueStatelessMarshaler INSTANCE = new AnyValueStatelessMarshaler(); + + private AnyValueStatelessMarshaler() {} + + @SuppressWarnings("unchecked") + @Override + public void writeTo(Serializer output, Value value, MarshalerContext context) + throws IOException { + switch (value.getType()) { + case STRING: + StringAnyValueStatelessMarshaler.INSTANCE.writeTo( + output, (String) value.getValue(), context); + return; + case BOOLEAN: + BoolAnyValueStatelessMarshaler.INSTANCE.writeTo( + output, (Boolean) value.getValue(), context); + return; + case LONG: + IntAnyValueStatelessMarshaler.INSTANCE.writeTo(output, (Long) value.getValue(), context); + return; + case DOUBLE: + DoubleAnyValueStatelessMarshaler.INSTANCE.writeTo( + output, (Double) value.getValue(), context); + return; + case ARRAY: + output.serializeMessageWithContext( + io.opentelemetry.proto.common.v1.internal.AnyValue.ARRAY_VALUE, + (List>) value.getValue(), + ArrayAnyValueStatelessMarshaler.INSTANCE, + context); + return; + case KEY_VALUE_LIST: + output.serializeMessageWithContext( + io.opentelemetry.proto.common.v1.internal.AnyValue.KVLIST_VALUE, + (List) value.getValue(), + KeyValueListAnyValueStatelessMarshaler.INSTANCE, + context); + return; + case BYTES: + BytesAnyValueStatelessMarshaler.INSTANCE.writeTo( + output, (ByteBuffer) value.getValue(), context); + return; + } + // Error prone ensures the switch statement is complete, otherwise only can happen with + // unaligned versions which are not supported. + throw new IllegalArgumentException("Unsupported value type."); + } + + @SuppressWarnings("unchecked") + @Override + public int getBinarySerializedSize(Value value, MarshalerContext context) { + switch (value.getType()) { + case STRING: + return StringAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize( + (String) value.getValue(), context); + case BOOLEAN: + return BoolAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize( + (Boolean) value.getValue(), context); + case LONG: + return IntAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize( + (Long) value.getValue(), context); + case DOUBLE: + return DoubleAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize( + (Double) value.getValue(), context); + case ARRAY: + return StatelessMarshalerUtil.sizeMessageWithContext( + io.opentelemetry.proto.common.v1.internal.AnyValue.ARRAY_VALUE, + (List>) value.getValue(), + ArrayAnyValueStatelessMarshaler.INSTANCE, + context); + case KEY_VALUE_LIST: + return StatelessMarshalerUtil.sizeMessageWithContext( + io.opentelemetry.proto.common.v1.internal.AnyValue.KVLIST_VALUE, + (List) value.getValue(), + KeyValueListAnyValueStatelessMarshaler.INSTANCE, + context); + case BYTES: + return BytesAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize( + (ByteBuffer) value.getValue(), context); + } + // Error prone ensures the switch statement is complete, otherwise only can happen with + // unaligned versions which are not supported. 
+ throw new IllegalArgumentException("Unsupported value type."); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ArrayAnyValueMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ArrayAnyValueMarshaler.java new file mode 100644 index 00000000000..9b9ba2fc26e --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ArrayAnyValueMarshaler.java @@ -0,0 +1,84 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.Value; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import io.opentelemetry.proto.common.v1.internal.ArrayValue; +import java.io.IOException; +import java.util.List; +import java.util.function.Function; + +final class ArrayAnyValueMarshaler extends MarshalerWithSize { + private final Marshaler value; + + private ArrayAnyValueMarshaler(ArrayValueMarshaler value) { + super(calculateSize(value)); + this.value = value; + } + + static MarshalerWithSize createAnyValue(List> values) { + return createInternal(values, AnyValueMarshaler::create); + } + + static MarshalerWithSize createString(List values) { + return createInternal(values, StringAnyValueMarshaler::create); + } + + static MarshalerWithSize createBool(List values) { + return createInternal(values, BoolAnyValueMarshaler::create); + } + + static MarshalerWithSize createInt(List values) { + return createInternal(values, IntAnyValueMarshaler::create); + } + + static MarshalerWithSize createDouble(List values) { + return createInternal(values, DoubleAnyValueMarshaler::create); + } + + private static MarshalerWithSize createInternal( + List values, Function initializer) { + int len = values.size(); + Marshaler[] marshalers = new Marshaler[len]; + for (int i = 0; i < len; i++) { + marshalers[i] = initializer.apply(values.get(i)); + } + return new ArrayAnyValueMarshaler(new ArrayValueMarshaler(marshalers)); + } + + @Override + public void writeTo(Serializer output) throws IOException { + output.serializeMessage(AnyValue.ARRAY_VALUE, value); + } + + private static int calculateSize(Marshaler value) { + return MarshalerUtil.sizeMessage(AnyValue.ARRAY_VALUE, value); + } + + private static class ArrayValueMarshaler extends MarshalerWithSize { + + private final Marshaler[] values; + + private ArrayValueMarshaler(Marshaler[] values) { + super(calculateSize(values)); + this.values = values; + } + + @Override + public void writeTo(Serializer output) throws IOException { + output.serializeRepeatedMessage(ArrayValue.VALUES, values); + } + + private static int calculateSize(Marshaler[] values) { + return MarshalerUtil.sizeRepeatedMessage(ArrayValue.VALUES, values); + } + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ArrayAnyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ArrayAnyValueStatelessMarshaler.java new file mode 100644 index 00000000000..2ccc4a4eca3 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ArrayAnyValueStatelessMarshaler.java @@ -0,0 +1,36 @@ +/* + * Copyright 
The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.Value; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.common.v1.internal.ArrayValue; +import java.io.IOException; +import java.util.List; + +/** A Marshaler of key value pairs. See {@link ArrayAnyValueMarshaler}. */ +final class ArrayAnyValueStatelessMarshaler implements StatelessMarshaler>> { + + static final ArrayAnyValueStatelessMarshaler INSTANCE = new ArrayAnyValueStatelessMarshaler(); + + private ArrayAnyValueStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, List> value, MarshalerContext context) + throws IOException { + output.serializeRepeatedMessageWithContext( + ArrayValue.VALUES, value, AnyValueStatelessMarshaler.INSTANCE, context); + } + + @Override + public int getBinarySerializedSize(List> value, MarshalerContext context) { + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ArrayValue.VALUES, value, AnyValueStatelessMarshaler.INSTANCE, context); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeArrayAnyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeArrayAnyValueStatelessMarshaler.java new file mode 100644 index 00000000000..db92ca1e7dc --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeArrayAnyValueStatelessMarshaler.java @@ -0,0 +1,89 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.AttributeType; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler2; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.common.v1.internal.ArrayValue; +import java.io.IOException; +import java.util.List; + +/** See {@link ArrayAnyValueMarshaler}. 
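AnyValueStatelessMarshaler and the array marshalers above route the API-level Value onto the protobuf AnyValue oneof by switching on Value.getType(). As a rough illustration only (not part of this change, and assuming the Value.of(...) and KeyValue.of(...) factories from io.opentelemetry.api.common), the trailing comments below name the oneof field each kind of value would be written to:

// Illustrative sketch only, not part of this diff. Assumes the Value.of(...) and
// KeyValue.of(...) factories from io.opentelemetry.api.common.
import io.opentelemetry.api.common.KeyValue;
import io.opentelemetry.api.common.Value;

class ValueDispatchSketch {
  static void demo() {
    Value<String> text = Value.of("hello");                              // STRING_VALUE
    Value<Boolean> flag = Value.of(true);                                // BOOL_VALUE
    Value<Long> count = Value.of(42L);                                   // INT_VALUE
    Value<Double> ratio = Value.of(0.5);                                 // DOUBLE_VALUE
    Value<?> array = Value.of(Value.of(1L), Value.of(2L));               // ARRAY_VALUE
    Value<?> kvList = Value.of(KeyValue.of("user", Value.of("alice")));  // KVLIST_VALUE
    Value<?> bytes = Value.of(new byte[] {1, 2, 3});                     // BYTES_VALUE
  }
}

LogMarshaler later in this diff feeds the log body through the same mapping via getBodyValue(), which is what lets bodies carry structured data rather than only strings.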
*/ +final class AttributeArrayAnyValueStatelessMarshaler + implements StatelessMarshaler2> { + static final AttributeArrayAnyValueStatelessMarshaler INSTANCE = + new AttributeArrayAnyValueStatelessMarshaler<>(); + + private AttributeArrayAnyValueStatelessMarshaler() {} + + @SuppressWarnings("unchecked") + @Override + public void writeTo(Serializer output, AttributeType type, List list, MarshalerContext context) + throws IOException { + switch (type) { + case STRING_ARRAY: + output.serializeRepeatedMessageWithContext( + ArrayValue.VALUES, + (List) list, + StringAnyValueStatelessMarshaler.INSTANCE, + context); + return; + case LONG_ARRAY: + output.serializeRepeatedMessageWithContext( + ArrayValue.VALUES, (List) list, IntAnyValueStatelessMarshaler.INSTANCE, context); + return; + case BOOLEAN_ARRAY: + output.serializeRepeatedMessageWithContext( + ArrayValue.VALUES, + (List) list, + BoolAnyValueStatelessMarshaler.INSTANCE, + context); + return; + case DOUBLE_ARRAY: + output.serializeRepeatedMessageWithContext( + ArrayValue.VALUES, + (List) list, + DoubleAnyValueStatelessMarshaler.INSTANCE, + context); + return; + default: + throw new IllegalArgumentException("Unsupported attribute type."); + } + } + + @SuppressWarnings("unchecked") + @Override + public int getBinarySerializedSize(AttributeType type, List list, MarshalerContext context) { + switch (type) { + case STRING_ARRAY: + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ArrayValue.VALUES, + (List) list, + StringAnyValueStatelessMarshaler.INSTANCE, + context); + case LONG_ARRAY: + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ArrayValue.VALUES, (List) list, IntAnyValueStatelessMarshaler.INSTANCE, context); + case BOOLEAN_ARRAY: + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ArrayValue.VALUES, + (List) list, + BoolAnyValueStatelessMarshaler.INSTANCE, + context); + case DOUBLE_ARRAY: + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ArrayValue.VALUES, + (List) list, + DoubleAnyValueStatelessMarshaler.INSTANCE, + context); + default: + throw new IllegalArgumentException("Unsupported attribute type."); + } + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValue.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValue.java new file mode 100644 index 00000000000..2b3cc82519e --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValue.java @@ -0,0 +1,50 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.KeyValue; +import java.util.ArrayList; +import java.util.List; + +/** + * Key-value pair of {@link AttributeKey} key and its corresponding value. + * + *
<p>
Conceptually if {@link Attributes} is a {@code Map<AttributeKey<T>, T>}, then this is a {@code Map.Entry<AttributeKey<T>, T>}. Note that whilst {@link + * KeyValue} is similar, this class holds type information on the Key rather than the value. + * + *
<p>
NOTE: This class is only used in the profiling signal, and exists in this module and package + * because it's a common dependency of the modules that use it. Long term, it probably makes more + * sense to live in {@code opentelemetry-sdk-profiles} once such a module exists. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public interface AttributeKeyValue { + + /** Returns a {@link AttributeKeyValue} for the given {@link AttributeKey} and {@code value}. */ + static AttributeKeyValue of(AttributeKey attributeKey, T value) { + return AttributeKeyValueImpl.create(attributeKey, value); + } + + /** Returns a List corresponding to the provided Map. This is a copy, not a view. */ + @SuppressWarnings("unchecked") + static List> of(Attributes attributes) { + List> result = new ArrayList<>(attributes.size()); + attributes.forEach( + (key, value) -> { + result.add(of((AttributeKey) key, (T) value)); + }); + return result; + } + + /** Returns the key. */ + AttributeKey getAttributeKey(); + + /** Returns the value. */ + T getValue(); +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValueImpl.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValueImpl.java new file mode 100644 index 00000000000..c4f6cdf89ca --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValueImpl.java @@ -0,0 +1,19 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.api.common.AttributeKey; + +@AutoValue +abstract class AttributeKeyValueImpl implements AttributeKeyValue { + + AttributeKeyValueImpl() {} + + static AttributeKeyValueImpl create(AttributeKey attributeKey, T value) { + return new AutoValue_AttributeKeyValueImpl(attributeKey, value); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValueStatelessMarshaler.java new file mode 100644 index 00000000000..3fb1f7c25f6 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValueStatelessMarshaler.java @@ -0,0 +1,145 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.AttributeType; +import io.opentelemetry.api.internal.InternalAttributeKeyImpl; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler2; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import io.opentelemetry.proto.common.v1.internal.KeyValue; +import java.io.IOException; +import java.util.List; + +/** + * A Marshaler of key value pairs. See {@link KeyValueMarshaler}. + * + *
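A minimal usage sketch for the AttributeKeyValue interface above, not part of this change and assuming only the methods it declares (the calling class is hypothetical and would need to live where this internal type is visible):

// Minimal sketch, not part of this change: converting Attributes into the typed entry list.
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import java.util.List;

class AttributeKeyValueSketch {
  static void demo() {
    Attributes attributes =
        Attributes.builder()
            .put(AttributeKey.stringKey("service.name"), "checkout")
            .put(AttributeKey.longKey("thread.id"), 7L)
            .build();

    // The conversion is a copy, not a view, so mutating the list never affects the Attributes.
    List<AttributeKeyValue<?>> entries = AttributeKeyValue.of(attributes);
    for (AttributeKeyValue<?> entry : entries) {
      System.out.println(entry.getAttributeKey().getKey() + " -> " + entry.getValue());
    }
  }
}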
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class AttributeKeyValueStatelessMarshaler + implements StatelessMarshaler2, Object> { + public static final AttributeKeyValueStatelessMarshaler INSTANCE = + new AttributeKeyValueStatelessMarshaler(); + private static final byte[] EMPTY_BYTES = new byte[0]; + + private AttributeKeyValueStatelessMarshaler() {} + + @Override + public void writeTo( + Serializer output, AttributeKey attributeKey, Object value, MarshalerContext context) + throws IOException { + if (attributeKey.getKey().isEmpty()) { + output.serializeString(KeyValue.KEY, EMPTY_BYTES); + } else if (attributeKey instanceof InternalAttributeKeyImpl) { + byte[] keyUtf8 = ((InternalAttributeKeyImpl) attributeKey).getKeyUtf8(); + output.serializeString(KeyValue.KEY, keyUtf8); + } else { + output.serializeStringWithContext(KeyValue.KEY, attributeKey.getKey(), context); + } + output.serializeMessageWithContext( + KeyValue.VALUE, attributeKey, value, ValueStatelessMarshaler.INSTANCE, context); + } + + @Override + public int getBinarySerializedSize( + AttributeKey attributeKey, Object value, MarshalerContext context) { + int size = 0; + if (!attributeKey.getKey().isEmpty()) { + if (attributeKey instanceof InternalAttributeKeyImpl) { + byte[] keyUtf8 = ((InternalAttributeKeyImpl) attributeKey).getKeyUtf8(); + size += MarshalerUtil.sizeBytes(KeyValue.KEY, keyUtf8); + } else { + return StatelessMarshalerUtil.sizeStringWithContext( + KeyValue.KEY, attributeKey.getKey(), context); + } + } + size += + StatelessMarshalerUtil.sizeMessageWithContext( + KeyValue.VALUE, attributeKey, value, ValueStatelessMarshaler.INSTANCE, context); + + return size; + } + + private static class ValueStatelessMarshaler + implements StatelessMarshaler2, Object> { + static final ValueStatelessMarshaler INSTANCE = new ValueStatelessMarshaler(); + + @SuppressWarnings("unchecked") + @Override + public int getBinarySerializedSize( + AttributeKey attributeKey, Object value, MarshalerContext context) { + AttributeType attributeType = attributeKey.getType(); + switch (attributeType) { + case STRING: + return StringAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize( + (String) value, context); + case LONG: + return IntAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize( + (Long) value, context); + case BOOLEAN: + return BoolAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize( + (Boolean) value, context); + case DOUBLE: + return DoubleAnyValueStatelessMarshaler.INSTANCE.getBinarySerializedSize( + (Double) value, context); + case STRING_ARRAY: + case LONG_ARRAY: + case BOOLEAN_ARRAY: + case DOUBLE_ARRAY: + return StatelessMarshalerUtil.sizeMessageWithContext( + AnyValue.ARRAY_VALUE, + attributeType, + (List) value, + AttributeArrayAnyValueStatelessMarshaler.INSTANCE, + context); + } + // Error prone ensures the switch statement is complete, otherwise only can happen with + // unaligned versions which are not supported. 
+ throw new IllegalArgumentException("Unsupported attribute type."); + } + + @SuppressWarnings("unchecked") + @Override + public void writeTo( + Serializer output, AttributeKey attributeKey, Object value, MarshalerContext context) + throws IOException { + AttributeType attributeType = attributeKey.getType(); + switch (attributeType) { + case STRING: + StringAnyValueStatelessMarshaler.INSTANCE.writeTo(output, (String) value, context); + return; + case LONG: + IntAnyValueStatelessMarshaler.INSTANCE.writeTo(output, (Long) value, context); + return; + case BOOLEAN: + BoolAnyValueStatelessMarshaler.INSTANCE.writeTo(output, (Boolean) value, context); + return; + case DOUBLE: + DoubleAnyValueStatelessMarshaler.INSTANCE.writeTo(output, (Double) value, context); + return; + case STRING_ARRAY: + case LONG_ARRAY: + case BOOLEAN_ARRAY: + case DOUBLE_ARRAY: + output.serializeMessageWithContext( + AnyValue.ARRAY_VALUE, + attributeType, + (List) value, + AttributeArrayAnyValueStatelessMarshaler.INSTANCE, + context); + return; + } + // Error prone ensures the switch statement is complete, otherwise only can happen with + // unaligned versions which are not supported. + throw new IllegalArgumentException("Unsupported attribute type."); + } + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BoolAnyValueMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BoolAnyValueMarshaler.java new file mode 100644 index 00000000000..2293c0c0e58 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BoolAnyValueMarshaler.java @@ -0,0 +1,37 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import java.io.IOException; + +final class BoolAnyValueMarshaler extends MarshalerWithSize { + + private final boolean value; + + private BoolAnyValueMarshaler(boolean value) { + super(calculateSize(value)); + this.value = value; + } + + static MarshalerWithSize create(boolean value) { + return new BoolAnyValueMarshaler(value); + } + + @Override + public void writeTo(Serializer output) throws IOException { + // Do not call serialize* method because we always have to write the message tag even if the + // value is empty since it's a oneof. 
+ output.writeBool(AnyValue.BOOL_VALUE, value); + } + + private static int calculateSize(boolean value) { + return AnyValue.BOOL_VALUE.getTagSize() + CodedOutputStream.computeBoolSizeNoTag(value); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BoolAnyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BoolAnyValueStatelessMarshaler.java new file mode 100644 index 00000000000..950aaa39f7d --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BoolAnyValueStatelessMarshaler.java @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import java.io.IOException; + +/** See {@link BoolAnyValueMarshaler}. */ +final class BoolAnyValueStatelessMarshaler implements StatelessMarshaler { + static final BoolAnyValueStatelessMarshaler INSTANCE = new BoolAnyValueStatelessMarshaler(); + + private BoolAnyValueStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, Boolean value, MarshalerContext context) + throws IOException { + output.writeBool(AnyValue.BOOL_VALUE, value); + } + + @Override + public int getBinarySerializedSize(Boolean value, MarshalerContext context) { + return AnyValue.BOOL_VALUE.getTagSize() + CodedOutputStream.computeBoolSizeNoTag(value); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BytesAnyValueMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BytesAnyValueMarshaler.java new file mode 100644 index 00000000000..d0a781039be --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BytesAnyValueMarshaler.java @@ -0,0 +1,40 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import java.io.IOException; +import java.nio.ByteBuffer; + +final class BytesAnyValueMarshaler extends MarshalerWithSize { + + private final byte[] value; + + private BytesAnyValueMarshaler(byte[] value) { + super(calculateSize(value)); + this.value = value; + } + + static MarshalerWithSize create(ByteBuffer value) { + byte[] bytes = new byte[value.remaining()]; + value.get(bytes); + return new BytesAnyValueMarshaler(bytes); + } + + @Override + public void writeTo(Serializer output) throws IOException { + // Do not call serialize* method because we always have to write the message tag even if the + // value is empty since it's a oneof. 
+ output.writeBytes(AnyValue.BYTES_VALUE, value); + } + + private static int calculateSize(byte[] value) { + return AnyValue.BYTES_VALUE.getTagSize() + CodedOutputStream.computeByteArraySizeNoTag(value); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BytesAnyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BytesAnyValueStatelessMarshaler.java new file mode 100644 index 00000000000..767ff8b8176 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/BytesAnyValueStatelessMarshaler.java @@ -0,0 +1,36 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** See {@link BytesAnyValueMarshaler}. */ +final class BytesAnyValueStatelessMarshaler implements StatelessMarshaler { + static final BytesAnyValueStatelessMarshaler INSTANCE = new BytesAnyValueStatelessMarshaler(); + + private BytesAnyValueStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, ByteBuffer value, MarshalerContext context) + throws IOException { + byte[] bytes = context.getData(byte[].class); + output.writeBytes(AnyValue.BYTES_VALUE, bytes); + } + + @Override + public int getBinarySerializedSize(ByteBuffer value, MarshalerContext context) { + byte[] bytes = new byte[value.remaining()]; + value.get(bytes); + context.addData(bytes); + return AnyValue.BYTES_VALUE.getTagSize() + CodedOutputStream.computeByteArraySizeNoTag(bytes); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/DoubleAnyValueMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/DoubleAnyValueMarshaler.java new file mode 100644 index 00000000000..5837976c92d --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/DoubleAnyValueMarshaler.java @@ -0,0 +1,37 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import java.io.IOException; + +final class DoubleAnyValueMarshaler extends MarshalerWithSize { + + private final double value; + + private DoubleAnyValueMarshaler(double value) { + super(calculateSize(value)); + this.value = value; + } + + static MarshalerWithSize create(double value) { + return new DoubleAnyValueMarshaler(value); + } + + @Override + public void writeTo(Serializer output) throws IOException { + // Do not call serialize* method because we always have to write the message tag even if the + // value is empty since it's a oneof. 
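BytesAnyValueStatelessMarshaler above shows the calling convention of the stateless marshalers: getBinarySerializedSize runs first and may stash derived data on the MarshalerContext, and writeTo then reads it back in the same order, so the ByteBuffer is copied only once. The following hypothetical marshaler is a sketch of the same idiom (the class name and CharSequence payload are invented; only calls that already appear in this diff are used):

import io.opentelemetry.exporter.internal.marshal.CodedOutputStream;
import io.opentelemetry.exporter.internal.marshal.MarshalerContext;
import io.opentelemetry.exporter.internal.marshal.Serializer;
import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler;
import io.opentelemetry.proto.common.v1.internal.AnyValue;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

final class Utf8CachingStatelessMarshaler implements StatelessMarshaler<CharSequence> {
  static final Utf8CachingStatelessMarshaler INSTANCE = new Utf8CachingStatelessMarshaler();

  private Utf8CachingStatelessMarshaler() {}

  @Override
  public int getBinarySerializedSize(CharSequence value, MarshalerContext context) {
    // Size pass: encode once and remember the bytes on the context.
    byte[] utf8 = value.toString().getBytes(StandardCharsets.UTF_8);
    context.addData(utf8);
    return AnyValue.STRING_VALUE.getTagSize() + CodedOutputStream.computeByteArraySizeNoTag(utf8);
  }

  @Override
  public void writeTo(Serializer output, CharSequence value, MarshalerContext context)
      throws IOException {
    // Write pass: the context replays data in insertion order, so the cached bytes come back here.
    byte[] utf8 = context.getData(byte[].class);
    output.writeString(AnyValue.STRING_VALUE, utf8);
  }
}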
+ output.writeDouble(AnyValue.DOUBLE_VALUE, value); + } + + private static int calculateSize(double value) { + return AnyValue.DOUBLE_VALUE.getTagSize() + CodedOutputStream.computeDoubleSizeNoTag(value); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/DoubleAnyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/DoubleAnyValueStatelessMarshaler.java new file mode 100644 index 00000000000..7403b241ab2 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/DoubleAnyValueStatelessMarshaler.java @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import java.io.IOException; + +/** See {@link DoubleAnyValueMarshaler}. */ +final class DoubleAnyValueStatelessMarshaler implements StatelessMarshaler { + static final DoubleAnyValueStatelessMarshaler INSTANCE = new DoubleAnyValueStatelessMarshaler(); + + private DoubleAnyValueStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, Double value, MarshalerContext context) + throws IOException { + output.writeDouble(AnyValue.DOUBLE_VALUE, value); + } + + @Override + public int getBinarySerializedSize(Double value, MarshalerContext context) { + return AnyValue.DOUBLE_VALUE.getTagSize() + CodedOutputStream.computeDoubleSizeNoTag(value); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/InstrumentationScopeMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/InstrumentationScopeMarshaler.java index 1d0a1d67349..c5466cad479 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/InstrumentationScopeMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/InstrumentationScopeMarshaler.java @@ -37,7 +37,8 @@ public static InstrumentationScopeMarshaler create(InstrumentationScopeInfo scop // a few times until the cache gets filled which is fine. 
byte[] name = MarshalerUtil.toBytes(scopeInfo.getName()); byte[] version = MarshalerUtil.toBytes(scopeInfo.getVersion()); - KeyValueMarshaler[] attributes = KeyValueMarshaler.createRepeated(scopeInfo.getAttributes()); + KeyValueMarshaler[] attributes = + KeyValueMarshaler.createForAttributes(scopeInfo.getAttributes()); RealInstrumentationScopeMarshaler realMarshaler = new RealInstrumentationScopeMarshaler(name, version, attributes); diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/IntAnyValueMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/IntAnyValueMarshaler.java new file mode 100644 index 00000000000..498c60e76bb --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/IntAnyValueMarshaler.java @@ -0,0 +1,37 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import java.io.IOException; + +final class IntAnyValueMarshaler extends MarshalerWithSize { + + private final long value; + + private IntAnyValueMarshaler(long value) { + super(calculateSize(value)); + this.value = value; + } + + static MarshalerWithSize create(long value) { + return new IntAnyValueMarshaler(value); + } + + @Override + public void writeTo(Serializer output) throws IOException { + // Do not call serialize* method because we always have to write the message tag even if the + // value is empty since it's a oneof. + output.writeInt64(AnyValue.INT_VALUE, value); + } + + private static int calculateSize(long value) { + return AnyValue.INT_VALUE.getTagSize() + CodedOutputStream.computeInt64SizeNoTag(value); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/IntAnyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/IntAnyValueStatelessMarshaler.java new file mode 100644 index 00000000000..407a586ec80 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/IntAnyValueStatelessMarshaler.java @@ -0,0 +1,30 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import java.io.IOException; + +/** See {@link IntAnyValueMarshaler}. 
*/ +final class IntAnyValueStatelessMarshaler implements StatelessMarshaler { + static final IntAnyValueStatelessMarshaler INSTANCE = new IntAnyValueStatelessMarshaler(); + + private IntAnyValueStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, Long value, MarshalerContext context) throws IOException { + output.writeInt64(AnyValue.INT_VALUE, value); + } + + @Override + public int getBinarySerializedSize(Long value, MarshalerContext context) { + return AnyValue.INT_VALUE.getTagSize() + CodedOutputStream.computeInt64SizeNoTag(value); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueListAnyValueMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueListAnyValueMarshaler.java new file mode 100644 index 00000000000..1e5b345acae --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueListAnyValueMarshaler.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import io.opentelemetry.proto.common.v1.internal.KeyValueList; +import java.io.IOException; +import java.util.List; + +final class KeyValueListAnyValueMarshaler extends MarshalerWithSize { + + private final Marshaler value; + + private KeyValueListAnyValueMarshaler(KeyValueListMarshaler value) { + super(calculateSize(value)); + this.value = value; + } + + static MarshalerWithSize create(List values) { + int len = values.size(); + KeyValueMarshaler[] marshalers = new KeyValueMarshaler[values.size()]; + for (int i = 0; i < len; i++) { + marshalers[i] = KeyValueMarshaler.createForKeyValue(values.get(i)); + } + return new KeyValueListAnyValueMarshaler(new KeyValueListMarshaler(marshalers)); + } + + @Override + public void writeTo(Serializer output) throws IOException { + output.serializeMessage(AnyValue.KVLIST_VALUE, value); + } + + private static int calculateSize(Marshaler value) { + return MarshalerUtil.sizeMessage(AnyValue.KVLIST_VALUE, value); + } + + private static class KeyValueListMarshaler extends MarshalerWithSize { + + private final Marshaler[] values; + + private KeyValueListMarshaler(KeyValueMarshaler[] values) { + super(calculateSize(values)); + this.values = values; + } + + @Override + public void writeTo(Serializer output) throws IOException { + output.serializeRepeatedMessage(KeyValueList.VALUES, values); + } + + private static int calculateSize(Marshaler[] values) { + return MarshalerUtil.sizeRepeatedMessage(KeyValueList.VALUES, values); + } + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueListAnyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueListAnyValueStatelessMarshaler.java new file mode 100644 index 00000000000..6bb7ef8d210 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueListAnyValueStatelessMarshaler.java @@ -0,0 +1,37 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: 
Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.common.v1.internal.KeyValueList; +import java.io.IOException; +import java.util.List; + +/** A Marshaler of key value pairs. See {@link KeyValueListAnyValueMarshaler}. */ +final class KeyValueListAnyValueStatelessMarshaler implements StatelessMarshaler> { + + static final KeyValueListAnyValueStatelessMarshaler INSTANCE = + new KeyValueListAnyValueStatelessMarshaler(); + + private KeyValueListAnyValueStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, List value, MarshalerContext context) + throws IOException { + output.serializeRepeatedMessageWithContext( + KeyValueList.VALUES, value, KeyValueStatelessMarshaler.INSTANCE, context); + } + + @Override + public int getBinarySerializedSize(List value, MarshalerContext context) { + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + KeyValueList.VALUES, value, KeyValueStatelessMarshaler.INSTANCE, context); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueMarshaler.java index 18f151a8f0c..08b9699291d 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueMarshaler.java @@ -7,52 +7,86 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.KeyValue; import io.opentelemetry.api.internal.InternalAttributeKeyImpl; -import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.proto.common.v1.internal.AnyValue; -import io.opentelemetry.proto.common.v1.internal.ArrayValue; -import io.opentelemetry.proto.common.v1.internal.KeyValue; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.function.BiConsumer; +import java.util.function.Consumer; /** - * A Marshaler of {@link Attributes}. + * A Marshaler of key value pairs. * *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. */ public final class KeyValueMarshaler extends MarshalerWithSize { + private static final byte[] EMPTY_BYTES = new byte[0]; private static final KeyValueMarshaler[] EMPTY_REPEATED = new KeyValueMarshaler[0]; + private final byte[] keyUtf8; + private final Marshaler value; + + private KeyValueMarshaler(byte[] keyUtf8, Marshaler value) { + super(calculateSize(keyUtf8, value)); + this.keyUtf8 = keyUtf8; + this.value = value; + } + + /** Returns Marshaler for the given KeyValue. */ + public static KeyValueMarshaler createForKeyValue(KeyValue keyValue) { + return new KeyValueMarshaler( + keyValue.getKey().getBytes(StandardCharsets.UTF_8), + AnyValueMarshaler.create(keyValue.getValue())); + } + /** Returns Marshalers for the given Attributes. */ @SuppressWarnings("AvoidObjectArrays") - public static KeyValueMarshaler[] createRepeated(Attributes attributes) { + public static KeyValueMarshaler[] createForAttributes(Attributes attributes) { if (attributes.isEmpty()) { return EMPTY_REPEATED; } - KeyValueMarshaler[] attributeMarshalers = new KeyValueMarshaler[attributes.size()]; + KeyValueMarshaler[] marshalers = new KeyValueMarshaler[attributes.size()]; attributes.forEach( new BiConsumer, Object>() { int index = 0; @Override public void accept(AttributeKey attributeKey, Object o) { - attributeMarshalers[index++] = KeyValueMarshaler.create(attributeKey, o); + marshalers[index++] = create(attributeKey, o); } }); - return attributeMarshalers; + return marshalers; } - private final byte[] keyUtf8; - private final Marshaler value; + @SuppressWarnings("AvoidObjectArrays") + public static KeyValueMarshaler[] createRepeated(List> items) { + if (items.isEmpty()) { + return EMPTY_REPEATED; + } + + KeyValueMarshaler[] keyValueMarshalers = new KeyValueMarshaler[items.size()]; + items.forEach( + item -> + new Consumer>() { + int index = 0; + + @Override + public void accept(AttributeKeyValue attributeKeyValue) { + keyValueMarshalers[index++] = + KeyValueMarshaler.create( + attributeKeyValue.getAttributeKey(), attributeKeyValue.getValue()); + } + }); + return keyValueMarshalers; + } @SuppressWarnings("unchecked") private static KeyValueMarshaler create(AttributeKey attributeKey, Object value) { @@ -66,188 +100,42 @@ private static KeyValueMarshaler create(AttributeKey attributeKey, Object val } switch (attributeKey.getType()) { case STRING: - return new KeyValueMarshaler( - keyUtf8, new StringAnyValueMarshaler(MarshalerUtil.toBytes((String) value))); + return new KeyValueMarshaler(keyUtf8, StringAnyValueMarshaler.create((String) value)); case LONG: - return new KeyValueMarshaler(keyUtf8, new Int64AnyValueMarshaler((long) value)); + return new KeyValueMarshaler(keyUtf8, IntAnyValueMarshaler.create((long) value)); case BOOLEAN: - return new KeyValueMarshaler(keyUtf8, new BoolAnyValueMarshaler((boolean) value)); + return new KeyValueMarshaler(keyUtf8, BoolAnyValueMarshaler.create((boolean) value)); case DOUBLE: - return new KeyValueMarshaler(keyUtf8, new AnyDoubleFieldMarshaler((double) value)); + return new KeyValueMarshaler(keyUtf8, DoubleAnyValueMarshaler.create((double) value)); case STRING_ARRAY: return new KeyValueMarshaler( - keyUtf8, - new ArrayAnyValueMarshaler(ArrayValueMarshaler.createString((List) value))); + keyUtf8, ArrayAnyValueMarshaler.createString((List) value)); case LONG_ARRAY: - return new KeyValueMarshaler( - keyUtf8, - new ArrayAnyValueMarshaler(ArrayValueMarshaler.createInt64((List) 
value))); + return new KeyValueMarshaler(keyUtf8, ArrayAnyValueMarshaler.createInt((List) value)); case BOOLEAN_ARRAY: return new KeyValueMarshaler( - keyUtf8, - new ArrayAnyValueMarshaler(ArrayValueMarshaler.createBool((List) value))); + keyUtf8, ArrayAnyValueMarshaler.createBool((List) value)); case DOUBLE_ARRAY: return new KeyValueMarshaler( - keyUtf8, - new ArrayAnyValueMarshaler(ArrayValueMarshaler.createDouble((List) value))); + keyUtf8, ArrayAnyValueMarshaler.createDouble((List) value)); } // Error prone ensures the switch statement is complete, otherwise only can happen with // unaligned versions which are not supported. throw new IllegalArgumentException("Unsupported attribute type."); } - private KeyValueMarshaler(byte[] keyUtf8, Marshaler value) { - super(calculateSize(keyUtf8, value)); - this.keyUtf8 = keyUtf8; - this.value = value; - } - @Override public void writeTo(Serializer output) throws IOException { - output.serializeString(KeyValue.KEY, keyUtf8); - output.serializeMessage(KeyValue.VALUE, value); + output.serializeString(io.opentelemetry.proto.common.v1.internal.KeyValue.KEY, keyUtf8); + output.serializeMessage(io.opentelemetry.proto.common.v1.internal.KeyValue.VALUE, value); } private static int calculateSize(byte[] keyUtf8, Marshaler value) { int size = 0; - size += MarshalerUtil.sizeBytes(KeyValue.KEY, keyUtf8); - size += MarshalerUtil.sizeMessage(KeyValue.VALUE, value); + size += + MarshalerUtil.sizeBytes(io.opentelemetry.proto.common.v1.internal.KeyValue.KEY, keyUtf8); + size += + MarshalerUtil.sizeMessage(io.opentelemetry.proto.common.v1.internal.KeyValue.VALUE, value); return size; } - - private static class BoolAnyValueMarshaler extends MarshalerWithSize { - - private final boolean value; - - BoolAnyValueMarshaler(boolean value) { - super(calculateSize(value)); - this.value = value; - } - - @Override - public void writeTo(Serializer output) throws IOException { - // Do not call serialize* method because we always have to write the message tag even if the - // value is empty since it's a oneof. - output.writeBool(AnyValue.BOOL_VALUE, value); - } - - private static int calculateSize(boolean value) { - return AnyValue.BOOL_VALUE.getTagSize() + CodedOutputStream.computeBoolSizeNoTag(value); - } - } - - private static class Int64AnyValueMarshaler extends MarshalerWithSize { - - private final long value; - - Int64AnyValueMarshaler(long value) { - super(calculateSize(value)); - this.value = value; - } - - @Override - public void writeTo(Serializer output) throws IOException { - // Do not call serialize* method because we always have to write the message tag even if the - // value is empty since it's a oneof. - output.writeInt64(AnyValue.INT_VALUE, value); - } - - private static int calculateSize(long value) { - return AnyValue.INT_VALUE.getTagSize() + CodedOutputStream.computeInt64SizeNoTag(value); - } - } - - private static class AnyDoubleFieldMarshaler extends MarshalerWithSize { - - private final double value; - - AnyDoubleFieldMarshaler(double value) { - super(calculateSize(value)); - this.value = value; - } - - @Override - public void writeTo(Serializer output) throws IOException { - // Do not call serialize* method because we always have to write the message tag even if the - // value is empty since it's a oneof. 
- output.writeDouble(AnyValue.DOUBLE_VALUE, value); - } - - private static int calculateSize(double value) { - return AnyValue.DOUBLE_VALUE.getTagSize() + CodedOutputStream.computeDoubleSizeNoTag(value); - } - } - - private static class ArrayAnyValueMarshaler extends MarshalerWithSize { - private final Marshaler value; - - private ArrayAnyValueMarshaler(Marshaler value) { - super(calculateSize(value)); - this.value = value; - } - - @Override - public void writeTo(Serializer output) throws IOException { - output.serializeMessage(AnyValue.ARRAY_VALUE, value); - } - - private static int calculateSize(Marshaler value) { - return MarshalerUtil.sizeMessage(AnyValue.ARRAY_VALUE, value); - } - } - - private static class ArrayValueMarshaler extends MarshalerWithSize { - - static ArrayValueMarshaler createString(List values) { - int len = values.size(); - Marshaler[] marshalers = new StringAnyValueMarshaler[len]; - for (int i = 0; i < len; i++) { - marshalers[i] = new StringAnyValueMarshaler(values.get(i).getBytes(StandardCharsets.UTF_8)); - } - return new ArrayValueMarshaler(marshalers); - } - - static ArrayValueMarshaler createBool(List values) { - int len = values.size(); - Marshaler[] marshalers = new BoolAnyValueMarshaler[len]; - for (int i = 0; i < len; i++) { - marshalers[i] = new BoolAnyValueMarshaler(values.get(i)); - } - return new ArrayValueMarshaler(marshalers); - } - - static ArrayValueMarshaler createInt64(List values) { - int len = values.size(); - Marshaler[] marshalers = new Int64AnyValueMarshaler[len]; - for (int i = 0; i < len; i++) { - marshalers[i] = new Int64AnyValueMarshaler(values.get(i)); - } - return new ArrayValueMarshaler(marshalers); - } - - static ArrayValueMarshaler createDouble(List values) { - int len = values.size(); - Marshaler[] marshalers = new AnyDoubleFieldMarshaler[len]; - for (int i = 0; i < len; i++) { - marshalers[i] = new AnyDoubleFieldMarshaler(values.get(i)); - } - return new ArrayValueMarshaler(marshalers); - } - - private final Marshaler[] values; - - private ArrayValueMarshaler(Marshaler[] values) { - super(calculateSize(values)); - this.values = values; - } - - @Override - public void writeTo(Serializer output) throws IOException { - output.serializeRepeatedMessage(ArrayValue.VALUES, values); - } - - private static int calculateSize(Marshaler[] values) { - return MarshalerUtil.sizeRepeatedMessage(ArrayValue.VALUES, values); - } - } } diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueStatelessMarshaler.java new file mode 100644 index 00000000000..09607917322 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/KeyValueStatelessMarshaler.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import java.io.IOException; + +/** + * A Marshaler of key value pairs. See {@link AnyValueMarshaler}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class KeyValueStatelessMarshaler implements StatelessMarshaler { + + public static final KeyValueStatelessMarshaler INSTANCE = new KeyValueStatelessMarshaler(); + private static final byte[] EMPTY_BYTES = new byte[0]; + + private KeyValueStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, KeyValue value, MarshalerContext context) + throws IOException { + String key = value.getKey(); + if (key.isEmpty()) { + output.serializeString(io.opentelemetry.proto.common.v1.internal.KeyValue.KEY, EMPTY_BYTES); + } else { + output.serializeStringWithContext( + io.opentelemetry.proto.common.v1.internal.KeyValue.KEY, key, context); + } + output.serializeMessageWithContext( + io.opentelemetry.proto.common.v1.internal.KeyValue.VALUE, + value.getValue(), + AnyValueStatelessMarshaler.INSTANCE, + context); + } + + @Override + public int getBinarySerializedSize(KeyValue value, MarshalerContext context) { + int size = 0; + String key = value.getKey(); + if (!key.isEmpty()) { + size += + StatelessMarshalerUtil.sizeStringWithContext( + io.opentelemetry.proto.common.v1.internal.KeyValue.KEY, key, context); + } + size += + StatelessMarshalerUtil.sizeMessageWithContext( + io.opentelemetry.proto.common.v1.internal.KeyValue.VALUE, + value.getValue(), + AnyValueStatelessMarshaler.INSTANCE, + context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ResourceMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ResourceMarshaler.java index 259f0070000..b3395448a79 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ResourceMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/ResourceMarshaler.java @@ -36,7 +36,8 @@ public static ResourceMarshaler create(io.opentelemetry.sdk.resources.Resource r // a few times until the cache gets filled which is fine. RealResourceMarshaler realMarshaler = - new RealResourceMarshaler(KeyValueMarshaler.createRepeated(resource.getAttributes())); + new RealResourceMarshaler( + KeyValueMarshaler.createForAttributes(resource.getAttributes())); ByteArrayOutputStream binaryBos = new ByteArrayOutputStream(realMarshaler.getBinarySerializedSize()); diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/StringAnyValueMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/StringAnyValueMarshaler.java index 81e1ab2c7be..cc7bf4527c6 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/StringAnyValueMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/StringAnyValueMarshaler.java @@ -6,6 +6,7 @@ package io.opentelemetry.exporter.internal.otlp; import io.opentelemetry.exporter.internal.marshal.CodedOutputStream; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; import io.opentelemetry.exporter.internal.marshal.Serializer; import io.opentelemetry.proto.common.v1.internal.AnyValue; @@ -17,23 +18,31 @@ *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. */ -public final class StringAnyValueMarshaler extends MarshalerWithSize { +final class StringAnyValueMarshaler extends MarshalerWithSize { private final byte[] valueUtf8; - public StringAnyValueMarshaler(byte[] valueUtf8) { + private StringAnyValueMarshaler(byte[] valueUtf8) { super(calculateSize(valueUtf8)); this.valueUtf8 = valueUtf8; } + static MarshalerWithSize create(String value) { + return new StringAnyValueMarshaler(MarshalerUtil.toBytes(value)); + } + @Override public void writeTo(Serializer output) throws IOException { - // Do not call serialize* method because we always have to write the message tag even if the - // value is empty since it's a oneof. + if (valueUtf8.length == 0) { + return; + } output.writeString(AnyValue.STRING_VALUE, valueUtf8); } private static int calculateSize(byte[] valueUtf8) { + if (valueUtf8.length == 0) { + return 0; + } return AnyValue.STRING_VALUE.getTagSize() + CodedOutputStream.computeByteArraySizeNoTag(valueUtf8); } diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/StringAnyValueStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/StringAnyValueStatelessMarshaler.java new file mode 100644 index 00000000000..9d9af0b5d0c --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/StringAnyValueStatelessMarshaler.java @@ -0,0 +1,36 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.common.v1.internal.AnyValue; +import java.io.IOException; + +/** + * A Marshaler of string-valued {@link AnyValue}. See {@link StringAnyValueMarshaler}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +final class StringAnyValueStatelessMarshaler implements StatelessMarshaler { + static final StringAnyValueStatelessMarshaler INSTANCE = new StringAnyValueStatelessMarshaler(); + + private StringAnyValueStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, String value, MarshalerContext context) + throws IOException { + output.serializeStringWithContext(AnyValue.STRING_VALUE, value, context); + } + + @Override + public int getBinarySerializedSize(String value, MarshalerContext context) { + return StatelessMarshalerUtil.sizeStringWithContext(AnyValue.STRING_VALUE, value, context); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/InstrumentationScopeLogsStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/InstrumentationScopeLogsStatelessMarshaler.java new file mode 100644 index 00000000000..60789604b3e --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/InstrumentationScopeLogsStatelessMarshaler.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.logs; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler2; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.InstrumentationScopeMarshaler; +import io.opentelemetry.proto.logs.v1.internal.ScopeLogs; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import java.io.IOException; +import java.util.List; + +/** See {@link InstrumentationScopeLogsMarshaler}. 
*/ +final class InstrumentationScopeLogsStatelessMarshaler + implements StatelessMarshaler2> { + static final InstrumentationScopeLogsStatelessMarshaler INSTANCE = + new InstrumentationScopeLogsStatelessMarshaler(); + + @Override + public void writeTo( + Serializer output, + InstrumentationScopeInfo instrumentationScope, + List logs, + MarshalerContext context) + throws IOException { + InstrumentationScopeMarshaler instrumentationScopeMarshaler = + context.getData(InstrumentationScopeMarshaler.class); + + output.serializeMessage(ScopeLogs.SCOPE, instrumentationScopeMarshaler); + output.serializeRepeatedMessageWithContext( + ScopeLogs.LOG_RECORDS, logs, LogStatelessMarshaler.INSTANCE, context); + output.serializeStringWithContext( + ScopeLogs.SCHEMA_URL, instrumentationScope.getSchemaUrl(), context); + } + + @Override + public int getBinarySerializedSize( + InstrumentationScopeInfo instrumentationScope, + List logs, + MarshalerContext context) { + InstrumentationScopeMarshaler instrumentationScopeMarshaler = + InstrumentationScopeMarshaler.create(instrumentationScope); + context.addData(instrumentationScopeMarshaler); + + int size = 0; + size += MarshalerUtil.sizeMessage(ScopeLogs.SCOPE, instrumentationScopeMarshaler); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ScopeLogs.LOG_RECORDS, logs, LogStatelessMarshaler.INSTANCE, context); + size += + StatelessMarshalerUtil.sizeStringWithContext( + ScopeLogs.SCHEMA_URL, instrumentationScope.getSchemaUrl(), context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogMarshaler.java index bd2c6b3b8a2..0ad265e9655 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogMarshaler.java @@ -14,36 +14,40 @@ import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; import io.opentelemetry.exporter.internal.marshal.ProtoEnumInfo; import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.otlp.AnyValueMarshaler; import io.opentelemetry.exporter.internal.otlp.KeyValueMarshaler; -import io.opentelemetry.exporter.internal.otlp.StringAnyValueMarshaler; import io.opentelemetry.proto.logs.v1.internal.LogRecord; import io.opentelemetry.proto.logs.v1.internal.SeverityNumber; import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.data.internal.ExtendedLogRecordData; import java.io.IOException; import javax.annotation.Nullable; final class LogMarshaler extends MarshalerWithSize { private static final String INVALID_TRACE_ID = TraceId.getInvalid(); private static final String INVALID_SPAN_ID = SpanId.getInvalid(); + private static final byte[] EMPTY_BYTES = new byte[0]; private final long timeUnixNano; private final long observedTimeUnixNano; private final ProtoEnumInfo severityNumber; private final byte[] severityText; - private final MarshalerWithSize anyValueMarshaler; + @Nullable private final MarshalerWithSize anyValueMarshaler; private final KeyValueMarshaler[] attributeMarshalers; private final int droppedAttributesCount; private final TraceFlags traceFlags; @Nullable private final String traceId; @Nullable private final String spanId; + private final byte[] eventName; static LogMarshaler create(LogRecordData logRecordData) { 
KeyValueMarshaler[] attributeMarshalers = - KeyValueMarshaler.createRepeated(logRecordData.getAttributes()); + KeyValueMarshaler.createForAttributes(logRecordData.getAttributes()); - // For now, map all the bodies to String AnyValue. - StringAnyValueMarshaler anyValueMarshaler = - new StringAnyValueMarshaler(MarshalerUtil.toBytes(logRecordData.getBody().asString())); + MarshalerWithSize bodyMarshaler = + logRecordData.getBodyValue() == null + ? null + : AnyValueMarshaler.create(logRecordData.getBodyValue()); SpanContext spanContext = logRecordData.getSpanContext(); return new LogMarshaler( @@ -51,12 +55,15 @@ static LogMarshaler create(LogRecordData logRecordData) { logRecordData.getObservedTimestampEpochNanos(), toProtoSeverityNumber(logRecordData.getSeverity()), MarshalerUtil.toBytes(logRecordData.getSeverityText()), - anyValueMarshaler, + bodyMarshaler, attributeMarshalers, logRecordData.getTotalAttributeCount() - logRecordData.getAttributes().size(), spanContext.getTraceFlags(), spanContext.getTraceId().equals(INVALID_TRACE_ID) ? null : spanContext.getTraceId(), - spanContext.getSpanId().equals(INVALID_SPAN_ID) ? null : spanContext.getSpanId()); + spanContext.getSpanId().equals(INVALID_SPAN_ID) ? null : spanContext.getSpanId(), + logRecordData instanceof ExtendedLogRecordData + ? MarshalerUtil.toBytes(((ExtendedLogRecordData) logRecordData).getEventName()) + : EMPTY_BYTES); } private LogMarshaler( @@ -64,12 +71,13 @@ private LogMarshaler( long observedTimeUnixNano, ProtoEnumInfo severityNumber, byte[] severityText, - MarshalerWithSize anyValueMarshaler, + @Nullable MarshalerWithSize anyValueMarshaler, KeyValueMarshaler[] attributeMarshalers, int droppedAttributesCount, TraceFlags traceFlags, @Nullable String traceId, - @Nullable String spanId) { + @Nullable String spanId, + byte[] eventName) { super( calculateSize( timeUnixNano, @@ -81,7 +89,8 @@ private LogMarshaler( droppedAttributesCount, traceFlags, traceId, - spanId)); + spanId, + eventName)); this.timeUnixNano = timeUnixNano; this.observedTimeUnixNano = observedTimeUnixNano; this.traceId = traceId; @@ -92,6 +101,7 @@ private LogMarshaler( this.anyValueMarshaler = anyValueMarshaler; this.attributeMarshalers = attributeMarshalers; this.droppedAttributesCount = droppedAttributesCount; + this.eventName = eventName; } @Override @@ -104,14 +114,18 @@ protected void writeTo(Serializer output) throws IOException { output.serializeString(LogRecord.SEVERITY_TEXT, severityText); - output.serializeMessage(LogRecord.BODY, anyValueMarshaler); + if (anyValueMarshaler != null) { + output.serializeMessage(LogRecord.BODY, anyValueMarshaler); + } output.serializeRepeatedMessage(LogRecord.ATTRIBUTES, attributeMarshalers); output.serializeUInt32(LogRecord.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); - output.serializeFixed32(LogRecord.FLAGS, toUnsignedInt(traceFlags.asByte())); + output.serializeByteAsFixed32(LogRecord.FLAGS, traceFlags.asByte()); output.serializeTraceId(LogRecord.TRACE_ID, traceId); output.serializeSpanId(LogRecord.SPAN_ID, spanId); + + output.serializeString(LogRecord.EVENT_NAME, eventName); } private static int calculateSize( @@ -119,12 +133,13 @@ private static int calculateSize( long observedTimeUnixNano, ProtoEnumInfo severityNumber, byte[] severityText, - MarshalerWithSize anyValueMarshaler, + @Nullable MarshalerWithSize anyValueMarshaler, KeyValueMarshaler[] attributeMarshalers, int droppedAttributesCount, TraceFlags traceFlags, @Nullable String traceId, - @Nullable String spanId) { + @Nullable String spanId, + byte[] 
eventName) { int size = 0; size += MarshalerUtil.sizeFixed64(LogRecord.TIME_UNIX_NANO, timeUnixNano); @@ -134,14 +149,18 @@ private static int calculateSize( size += MarshalerUtil.sizeBytes(LogRecord.SEVERITY_TEXT, severityText); - size += MarshalerUtil.sizeMessage(LogRecord.BODY, anyValueMarshaler); + if (anyValueMarshaler != null) { + size += MarshalerUtil.sizeMessage(LogRecord.BODY, anyValueMarshaler); + } size += MarshalerUtil.sizeRepeatedMessage(LogRecord.ATTRIBUTES, attributeMarshalers); size += MarshalerUtil.sizeUInt32(LogRecord.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); - size += MarshalerUtil.sizeFixed32(LogRecord.FLAGS, toUnsignedInt(traceFlags.asByte())); + size += MarshalerUtil.sizeByteAsFixed32(LogRecord.FLAGS, traceFlags.asByte()); size += MarshalerUtil.sizeTraceId(LogRecord.TRACE_ID, traceId); size += MarshalerUtil.sizeSpanId(LogRecord.SPAN_ID, spanId); + + size += MarshalerUtil.sizeBytes(LogRecord.EVENT_NAME, eventName); return size; } @@ -202,9 +221,4 @@ static ProtoEnumInfo toProtoSeverityNumber(Severity severity) { // NB: Should not be possible with aligned versions. return SeverityNumber.SEVERITY_NUMBER_UNSPECIFIED; } - - /** Vendored {@link Byte#toUnsignedInt(byte)} to support Android. */ - private static int toUnsignedInt(byte x) { - return ((int) x) & 0xff; - } } diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogReusableDataMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogReusableDataMarshaler.java new file mode 100644 index 00000000000..fadaaa09bc4 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogReusableDataMarshaler.java @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.logs; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import java.util.Collection; +import java.util.Deque; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.function.BiFunction; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. 
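+ *
+ * <p>A minimal sketch of wiring this class into an exporter, assuming a {@code sender} whose
+ * {@code send} method returns a {@link CompletableResultCode} and a {@code logs} collection
+ * (all three are placeholders for this example, not part of this class):
+ *
+ * <pre>{@code
+ * LogReusableDataMarshaler marshaler =
+ *     new LogReusableDataMarshaler(
+ *         MemoryMode.REUSABLE_DATA, (request, numItems) -> sender.send(request, numItems));
+ * CompletableResultCode result = marshaler.export(logs);
+ * }</pre>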
+ */ +public class LogReusableDataMarshaler { + + private final Deque marshalerPool = + new ConcurrentLinkedDeque<>(); + + private final MemoryMode memoryMode; + private final BiFunction doExport; + + public LogReusableDataMarshaler( + MemoryMode memoryMode, BiFunction doExport) { + this.memoryMode = memoryMode; + this.doExport = doExport; + } + + public MemoryMode getMemoryMode() { + return memoryMode; + } + + public CompletableResultCode export(Collection logs) { + if (memoryMode == MemoryMode.REUSABLE_DATA) { + LowAllocationLogsRequestMarshaler marshaler = marshalerPool.poll(); + if (marshaler == null) { + marshaler = new LowAllocationLogsRequestMarshaler(); + } + LowAllocationLogsRequestMarshaler exportMarshaler = marshaler; + exportMarshaler.initialize(logs); + return doExport + .apply(exportMarshaler, logs.size()) + .whenComplete( + () -> { + exportMarshaler.reset(); + marshalerPool.add(exportMarshaler); + }); + } + // MemoryMode == MemoryMode.IMMUTABLE_DATA + LogsRequestMarshaler request = LogsRequestMarshaler.create(logs); + return doExport.apply(request, logs.size()); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogStatelessMarshaler.java new file mode 100644 index 00000000000..1477af88ce6 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LogStatelessMarshaler.java @@ -0,0 +1,109 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.logs; + +import static io.opentelemetry.exporter.internal.otlp.logs.LogMarshaler.toProtoSeverityNumber; + +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanId; +import io.opentelemetry.api.trace.TraceId; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.AnyValueStatelessMarshaler; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValueStatelessMarshaler; +import io.opentelemetry.proto.logs.v1.internal.LogRecord; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.data.internal.ExtendedLogRecordData; +import java.io.IOException; + +/** See {@link LogMarshaler}. 
*/ +final class LogStatelessMarshaler implements StatelessMarshaler { + private static final String INVALID_TRACE_ID = TraceId.getInvalid(); + private static final String INVALID_SPAN_ID = SpanId.getInvalid(); + static final LogStatelessMarshaler INSTANCE = new LogStatelessMarshaler(); + + @Override + public void writeTo(Serializer output, LogRecordData log, MarshalerContext context) + throws IOException { + output.serializeFixed64(LogRecord.TIME_UNIX_NANO, log.getTimestampEpochNanos()); + output.serializeFixed64( + LogRecord.OBSERVED_TIME_UNIX_NANO, log.getObservedTimestampEpochNanos()); + output.serializeEnum(LogRecord.SEVERITY_NUMBER, toProtoSeverityNumber(log.getSeverity())); + output.serializeStringWithContext(LogRecord.SEVERITY_TEXT, log.getSeverityText(), context); + if (log.getBodyValue() != null) { + output.serializeMessageWithContext( + LogRecord.BODY, log.getBodyValue(), AnyValueStatelessMarshaler.INSTANCE, context); + } + output.serializeRepeatedMessageWithContext( + LogRecord.ATTRIBUTES, + log.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + int droppedAttributesCount = log.getTotalAttributeCount() - log.getAttributes().size(); + output.serializeUInt32(LogRecord.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + + SpanContext spanContext = log.getSpanContext(); + output.serializeFixed32(LogRecord.FLAGS, spanContext.getTraceFlags().asByte()); + if (!spanContext.getTraceId().equals(INVALID_TRACE_ID)) { + output.serializeTraceId(LogRecord.TRACE_ID, spanContext.getTraceId(), context); + } + if (!spanContext.getSpanId().equals(INVALID_SPAN_ID)) { + output.serializeSpanId(LogRecord.SPAN_ID, spanContext.getSpanId(), context); + } + if (log instanceof ExtendedLogRecordData) { + output.serializeStringWithContext( + LogRecord.EVENT_NAME, ((ExtendedLogRecordData) log).getEventName(), context); + } + } + + @Override + public int getBinarySerializedSize(LogRecordData log, MarshalerContext context) { + int size = 0; + + size += MarshalerUtil.sizeFixed64(LogRecord.TIME_UNIX_NANO, log.getTimestampEpochNanos()); + size += + MarshalerUtil.sizeFixed64( + LogRecord.OBSERVED_TIME_UNIX_NANO, log.getObservedTimestampEpochNanos()); + size += + MarshalerUtil.sizeEnum(LogRecord.SEVERITY_NUMBER, toProtoSeverityNumber(log.getSeverity())); + size += + StatelessMarshalerUtil.sizeStringWithContext( + LogRecord.SEVERITY_TEXT, log.getSeverityText(), context); + if (log.getBodyValue() != null) { + size += + StatelessMarshalerUtil.sizeMessageWithContext( + LogRecord.BODY, log.getBodyValue(), AnyValueStatelessMarshaler.INSTANCE, context); + } + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + LogRecord.ATTRIBUTES, + log.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + int droppedAttributesCount = log.getTotalAttributeCount() - log.getAttributes().size(); + size += MarshalerUtil.sizeUInt32(LogRecord.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + + SpanContext spanContext = log.getSpanContext(); + size += MarshalerUtil.sizeFixed32(LogRecord.FLAGS, spanContext.getTraceFlags().asByte()); + if (!spanContext.getTraceId().equals(INVALID_TRACE_ID)) { + size += MarshalerUtil.sizeTraceId(LogRecord.TRACE_ID, spanContext.getTraceId()); + } + if (!spanContext.getSpanId().equals(INVALID_SPAN_ID)) { + size += MarshalerUtil.sizeSpanId(LogRecord.SPAN_ID, spanContext.getSpanId()); + } + + if (log instanceof ExtendedLogRecordData) { + size += + StatelessMarshalerUtil.sizeStringWithContext( + LogRecord.EVENT_NAME, ((ExtendedLogRecordData) 
log).getEventName(), context); + } + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LowAllocationLogsRequestMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LowAllocationLogsRequestMarshaler.java new file mode 100644 index 00000000000..f3bee53f061 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/LowAllocationLogsRequestMarshaler.java @@ -0,0 +1,107 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.logs; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.collector.logs.v1.internal.ExportLogsServiceRequest; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.resources.Resource; +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * {@link Marshaler} to convert SDK {@link LogRecordData} to OTLP ExportLogsServiceRequest. See + * {@link LogsRequestMarshaler}. + * + *

<p>Example usage:
+ *
+ * <pre>{@code
+ * void marshal(LowAllocationLogsRequestMarshaler requestMarshaler, OutputStream output,
+ *     List<LogRecordData> logDataList) throws IOException {
+ *   requestMarshaler.initialize(logDataList);
+ *   try {
+ *     requestMarshaler.writeBinaryTo(output);
+ *   } finally {
+ *     requestMarshaler.reset();
+ *   }
+ * }
+ * }</pre>
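+ *
+ * <p>The same instance can be reused for subsequent batches, which is how
+ * {@code LogReusableDataMarshaler} pools it; a minimal sketch of that pattern, assuming an
+ * {@code OutputStream output} and an iterable of {@code batches} (both illustrative only):
+ *
+ * <pre>{@code
+ * LowAllocationLogsRequestMarshaler requestMarshaler = new LowAllocationLogsRequestMarshaler();
+ * for (Collection<LogRecordData> batch : batches) {
+ *   requestMarshaler.initialize(batch);
+ *   try {
+ *     requestMarshaler.writeBinaryTo(output);
+ *   } finally {
+ *     requestMarshaler.reset();
+ *   }
+ * }
+ * }</pre>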
+ * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class LowAllocationLogsRequestMarshaler extends Marshaler { + private static final MarshalerContext.Key RESOURCE_LOG_SIZE_CALCULATOR_KEY = + MarshalerContext.key(); + private static final MarshalerContext.Key RESOURCE_LOG_WRITER_KEY = MarshalerContext.key(); + + private final MarshalerContext context = new MarshalerContext(); + + @SuppressWarnings("NullAway") + private Map>> resourceAndScopeMap; + + private int size; + + public void initialize(Collection logDataList) { + resourceAndScopeMap = groupByResourceAndScope(context, logDataList); + size = calculateSize(context, resourceAndScopeMap); + } + + public void reset() { + context.reset(); + } + + @Override + public int getBinarySerializedSize() { + return size; + } + + @Override + public void writeTo(Serializer output) throws IOException { + // serializing can be retried, reset the indexes, so we could call writeTo multiple times + context.resetReadIndex(); + output.serializeRepeatedMessageWithContext( + ExportLogsServiceRequest.RESOURCE_LOGS, + resourceAndScopeMap, + ResourceLogsStatelessMarshaler.INSTANCE, + context, + RESOURCE_LOG_WRITER_KEY); + } + + private static int calculateSize( + MarshalerContext context, + Map>> resourceAndScopeMap) { + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ExportLogsServiceRequest.RESOURCE_LOGS, + resourceAndScopeMap, + ResourceLogsStatelessMarshaler.INSTANCE, + context, + RESOURCE_LOG_SIZE_CALCULATOR_KEY); + } + + private static Map>> + groupByResourceAndScope(MarshalerContext context, Collection logDataList) { + + if (logDataList.isEmpty()) { + return Collections.emptyMap(); + } + + return StatelessMarshalerUtil.groupByResourceAndScope( + logDataList, + // TODO(anuraaga): Replace with an internal SdkData type of interface that exposes these + // two. + LogRecordData::getResource, + LogRecordData::getInstrumentationScopeInfo, + context); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/ResourceLogsStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/ResourceLogsStatelessMarshaler.java new file mode 100644 index 00000000000..ba128c880c7 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/logs/ResourceLogsStatelessMarshaler.java @@ -0,0 +1,80 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.logs; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler2; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.ResourceMarshaler; +import io.opentelemetry.proto.logs.v1.internal.ResourceLogs; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.resources.Resource; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * A Marshaler of ResourceLogs. See {@link ResourceLogsMarshaler}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ResourceLogsStatelessMarshaler + implements StatelessMarshaler2>> { + static final ResourceLogsStatelessMarshaler INSTANCE = new ResourceLogsStatelessMarshaler(); + private static final MarshalerContext.Key SCOPE_LOG_WRITER_KEY = MarshalerContext.key(); + private static final MarshalerContext.Key SCOPE_LOG_SIZE_CALCULATOR_KEY = MarshalerContext.key(); + + @Override + public void writeTo( + Serializer output, + Resource resource, + Map> scopeMap, + MarshalerContext context) + throws IOException { + ResourceMarshaler resourceMarshaler = context.getData(ResourceMarshaler.class); + output.serializeMessage(ResourceLogs.RESOURCE, resourceMarshaler); + + output.serializeRepeatedMessageWithContext( + ResourceLogs.SCOPE_LOGS, + scopeMap, + InstrumentationScopeLogsStatelessMarshaler.INSTANCE, + context, + SCOPE_LOG_WRITER_KEY); + + output.serializeStringWithContext(ResourceLogs.SCHEMA_URL, resource.getSchemaUrl(), context); + } + + @Override + public int getBinarySerializedSize( + Resource resource, + Map> scopeMap, + MarshalerContext context) { + + int size = 0; + + ResourceMarshaler resourceMarshaler = ResourceMarshaler.create(resource); + context.addData(resourceMarshaler); + size += MarshalerUtil.sizeMessage(ResourceLogs.RESOURCE, resourceMarshaler); + + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ResourceLogs.SCOPE_LOGS, + scopeMap, + InstrumentationScopeLogsStatelessMarshaler.INSTANCE, + context, + SCOPE_LOG_SIZE_CALCULATOR_KEY); + + size += + StatelessMarshalerUtil.sizeStringWithContext( + ResourceLogs.SCHEMA_URL, resource.getSchemaUrl(), context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExemplarMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExemplarMarshaler.java index ed4956ec9a8..1e0ec8d25cb 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExemplarMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExemplarMarshaler.java @@ -37,22 +37,15 @@ static ExemplarMarshaler[] createRepeated(List exemplars return marshalers; } - private static ExemplarMarshaler create(ExemplarData exemplar) { + // Visible for testing + static ExemplarMarshaler create(ExemplarData exemplar) { KeyValueMarshaler[] attributeMarshalers = - KeyValueMarshaler.createRepeated(exemplar.getFilteredAttributes()); - - ProtoFieldInfo valueField; - if (exemplar instanceof LongExemplarData) { - valueField = io.opentelemetry.proto.metrics.v1.internal.Exemplar.AS_INT; - } else { - assert exemplar instanceof DoubleExemplarData; - valueField = io.opentelemetry.proto.metrics.v1.internal.Exemplar.AS_DOUBLE; - } + KeyValueMarshaler.createForAttributes(exemplar.getFilteredAttributes()); return new ExemplarMarshaler( exemplar.getEpochNanos(), exemplar, - valueField, + toProtoExemplarValueType(exemplar), exemplar.getSpanContext(), attributeMarshalers); } @@ -121,4 +114,13 @@ private static int calculateSize( filteredAttributeMarshalers); return size; } + + static ProtoFieldInfo toProtoExemplarValueType(ExemplarData exemplar) { + if (exemplar instanceof LongExemplarData) { + return io.opentelemetry.proto.metrics.v1.internal.Exemplar.AS_INT; + } else { + assert exemplar instanceof DoubleExemplarData; + return 
io.opentelemetry.proto.metrics.v1.internal.Exemplar.AS_DOUBLE; + } + } } diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExemplarStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExemplarStatelessMarshaler.java new file mode 100644 index 00000000000..9959beb7160 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExemplarStatelessMarshaler.java @@ -0,0 +1,93 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import static io.opentelemetry.exporter.internal.otlp.metrics.ExemplarMarshaler.toProtoExemplarValueType; + +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.ProtoFieldInfo; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValueStatelessMarshaler; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.metrics.data.ExemplarData; +import io.opentelemetry.sdk.metrics.data.LongExemplarData; +import java.io.IOException; + +/** See {@link ExemplarMarshaler}. */ +final class ExemplarStatelessMarshaler implements StatelessMarshaler { + static final ExemplarStatelessMarshaler INSTANCE = new ExemplarStatelessMarshaler(); + + private ExemplarStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, ExemplarData exemplar, MarshalerContext context) + throws IOException { + output.serializeFixed64( + io.opentelemetry.proto.metrics.v1.internal.Exemplar.TIME_UNIX_NANO, + exemplar.getEpochNanos()); + ProtoFieldInfo valueField = toProtoExemplarValueType(exemplar); + if (valueField == io.opentelemetry.proto.metrics.v1.internal.Exemplar.AS_INT) { + output.serializeFixed64Optional(valueField, ((LongExemplarData) exemplar).getValue()); + } else { + output.serializeDoubleOptional(valueField, ((DoubleExemplarData) exemplar).getValue()); + } + SpanContext spanContext = exemplar.getSpanContext(); + if (spanContext.isValid()) { + output.serializeSpanId( + io.opentelemetry.proto.metrics.v1.internal.Exemplar.SPAN_ID, + spanContext.getSpanId(), + context); + output.serializeTraceId( + io.opentelemetry.proto.metrics.v1.internal.Exemplar.TRACE_ID, + spanContext.getTraceId(), + context); + } + output.serializeRepeatedMessageWithContext( + io.opentelemetry.proto.metrics.v1.internal.Exemplar.FILTERED_ATTRIBUTES, + exemplar.getFilteredAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + } + + @Override + public int getBinarySerializedSize(ExemplarData exemplar, MarshalerContext context) { + int size = 0; + size += + MarshalerUtil.sizeFixed64( + io.opentelemetry.proto.metrics.v1.internal.Exemplar.TIME_UNIX_NANO, + exemplar.getEpochNanos()); + ProtoFieldInfo valueField = toProtoExemplarValueType(exemplar); + if (valueField == io.opentelemetry.proto.metrics.v1.internal.Exemplar.AS_INT) { + size += + MarshalerUtil.sizeFixed64Optional(valueField, ((LongExemplarData) exemplar).getValue()); + } else { + size += + MarshalerUtil.sizeDoubleOptional(valueField, ((DoubleExemplarData) 
exemplar).getValue()); + } + SpanContext spanContext = exemplar.getSpanContext(); + if (spanContext.isValid()) { + size += + MarshalerUtil.sizeSpanId( + io.opentelemetry.proto.metrics.v1.internal.Exemplar.SPAN_ID, spanContext.getSpanId()); + size += + MarshalerUtil.sizeTraceId( + io.opentelemetry.proto.metrics.v1.internal.Exemplar.TRACE_ID, + spanContext.getTraceId()); + } + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + io.opentelemetry.proto.metrics.v1.internal.Exemplar.FILTERED_ATTRIBUTES, + exemplar.getFilteredAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramBucketsMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramBucketsMarshaler.java index 54bb047398c..01cbee4c1b7 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramBucketsMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramBucketsMarshaler.java @@ -9,6 +9,7 @@ import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; import io.opentelemetry.exporter.internal.marshal.Serializer; import io.opentelemetry.proto.metrics.v1.internal.ExponentialHistogramDataPoint; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; import io.opentelemetry.sdk.internal.PrimitiveLongList; import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; import java.io.IOException; @@ -35,16 +36,29 @@ private ExponentialHistogramBucketsMarshaler(int offset, List counts) { @Override protected void writeTo(Serializer output) throws IOException { output.serializeSInt32(ExponentialHistogramDataPoint.Buckets.OFFSET, offset); - output.serializeRepeatedUInt64( - ExponentialHistogramDataPoint.Buckets.BUCKET_COUNTS, PrimitiveLongList.toArray(counts)); + if (counts instanceof DynamicPrimitiveLongList) { + output.serializeRepeatedUInt64( + ExponentialHistogramDataPoint.Buckets.BUCKET_COUNTS, (DynamicPrimitiveLongList) counts); + } else { + output.serializeRepeatedUInt64( + ExponentialHistogramDataPoint.Buckets.BUCKET_COUNTS, PrimitiveLongList.toArray(counts)); + } } - private static int calculateSize(int offset, List counts) { + static int calculateSize(int offset, List counts) { int size = 0; size += MarshalerUtil.sizeSInt32(ExponentialHistogramDataPoint.Buckets.OFFSET, offset); - size += - MarshalerUtil.sizeRepeatedUInt64( - ExponentialHistogramDataPoint.Buckets.BUCKET_COUNTS, PrimitiveLongList.toArray(counts)); + if (counts instanceof DynamicPrimitiveLongList) { + size += + MarshalerUtil.sizeRepeatedUInt64( + ExponentialHistogramDataPoint.Buckets.BUCKET_COUNTS, + (DynamicPrimitiveLongList) counts); + } else { + size += + MarshalerUtil.sizeRepeatedUInt64( + ExponentialHistogramDataPoint.Buckets.BUCKET_COUNTS, + PrimitiveLongList.toArray(counts)); + } return size; } } diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramBucketsStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramBucketsStatelessMarshaler.java new file mode 100644 index 00000000000..01626906b42 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramBucketsStatelessMarshaler.java @@ 
-0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.proto.metrics.v1.internal.ExponentialHistogramDataPoint; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; +import io.opentelemetry.sdk.internal.PrimitiveLongList; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; +import java.io.IOException; +import java.util.List; + +/** See {@link ExponentialHistogramBucketsMarshaler}. */ +final class ExponentialHistogramBucketsStatelessMarshaler + implements StatelessMarshaler { + static final ExponentialHistogramBucketsStatelessMarshaler INSTANCE = + new ExponentialHistogramBucketsStatelessMarshaler(); + + private ExponentialHistogramBucketsStatelessMarshaler() {} + + @Override + public void writeTo( + Serializer output, ExponentialHistogramBuckets buckets, MarshalerContext context) + throws IOException { + output.serializeSInt32(ExponentialHistogramDataPoint.Buckets.OFFSET, buckets.getOffset()); + List counts = buckets.getBucketCounts(); + if (counts instanceof DynamicPrimitiveLongList) { + output.serializeRepeatedUInt64( + ExponentialHistogramDataPoint.Buckets.BUCKET_COUNTS, (DynamicPrimitiveLongList) counts); + } else { + output.serializeRepeatedUInt64( + ExponentialHistogramDataPoint.Buckets.BUCKET_COUNTS, PrimitiveLongList.toArray(counts)); + } + } + + @Override + public int getBinarySerializedSize( + ExponentialHistogramBuckets buckets, MarshalerContext context) { + return ExponentialHistogramBucketsMarshaler.calculateSize( + buckets.getOffset(), buckets.getBucketCounts()); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramDataPointMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramDataPointMarshaler.java index e7ac8dcd68b..071833954ad 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramDataPointMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramDataPointMarshaler.java @@ -83,7 +83,7 @@ private ExponentialHistogramDataPointMarshaler( } static ExponentialHistogramDataPointMarshaler create(ExponentialHistogramPointData point) { - KeyValueMarshaler[] attributes = KeyValueMarshaler.createRepeated(point.getAttributes()); + KeyValueMarshaler[] attributes = KeyValueMarshaler.createForAttributes(point.getAttributes()); ExemplarMarshaler[] exemplars = ExemplarMarshaler.createRepeated(point.getExemplars()); ExponentialHistogramBucketsMarshaler positiveBuckets = diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramDataPointStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramDataPointStatelessMarshaler.java new file mode 100644 index 00000000000..bbf2a1d6881 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramDataPointStatelessMarshaler.java @@ -0,0 +1,113 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + 
+package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValueStatelessMarshaler; +import io.opentelemetry.proto.metrics.v1.internal.ExponentialHistogramDataPoint; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData; +import java.io.IOException; + +/** See {@link ExponentialHistogramDataPointMarshaler}. */ +final class ExponentialHistogramDataPointStatelessMarshaler + implements StatelessMarshaler { + static final ExponentialHistogramDataPointStatelessMarshaler INSTANCE = + new ExponentialHistogramDataPointStatelessMarshaler(); + + private ExponentialHistogramDataPointStatelessMarshaler() {} + + @Override + public void writeTo( + Serializer output, ExponentialHistogramPointData point, MarshalerContext context) + throws IOException { + output.serializeFixed64( + ExponentialHistogramDataPoint.START_TIME_UNIX_NANO, point.getStartEpochNanos()); + output.serializeFixed64(ExponentialHistogramDataPoint.TIME_UNIX_NANO, point.getEpochNanos()); + output.serializeFixed64(ExponentialHistogramDataPoint.COUNT, point.getCount()); + output.serializeDouble(ExponentialHistogramDataPoint.SUM, point.getSum()); + if (point.hasMin()) { + output.serializeDoubleOptional(ExponentialHistogramDataPoint.MIN, point.getMin()); + } + if (point.hasMax()) { + output.serializeDoubleOptional(ExponentialHistogramDataPoint.MAX, point.getMax()); + } + output.serializeSInt32(ExponentialHistogramDataPoint.SCALE, point.getScale()); + output.serializeFixed64(ExponentialHistogramDataPoint.ZERO_COUNT, point.getZeroCount()); + output.serializeMessageWithContext( + ExponentialHistogramDataPoint.POSITIVE, + point.getPositiveBuckets(), + ExponentialHistogramBucketsStatelessMarshaler.INSTANCE, + context); + output.serializeMessageWithContext( + ExponentialHistogramDataPoint.NEGATIVE, + point.getNegativeBuckets(), + ExponentialHistogramBucketsStatelessMarshaler.INSTANCE, + context); + output.serializeRepeatedMessageWithContext( + ExponentialHistogramDataPoint.EXEMPLARS, + point.getExemplars(), + ExemplarStatelessMarshaler.INSTANCE, + context); + output.serializeRepeatedMessageWithContext( + ExponentialHistogramDataPoint.ATTRIBUTES, + point.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + } + + @Override + public int getBinarySerializedSize( + ExponentialHistogramPointData point, MarshalerContext context) { + int size = 0; + size += + MarshalerUtil.sizeFixed64( + ExponentialHistogramDataPoint.START_TIME_UNIX_NANO, point.getStartEpochNanos()); + size += + MarshalerUtil.sizeFixed64( + ExponentialHistogramDataPoint.TIME_UNIX_NANO, point.getEpochNanos()); + size += MarshalerUtil.sizeFixed64(ExponentialHistogramDataPoint.COUNT, point.getCount()); + size += MarshalerUtil.sizeDouble(ExponentialHistogramDataPoint.SUM, point.getSum()); + if (point.hasMin()) { + size += MarshalerUtil.sizeDoubleOptional(ExponentialHistogramDataPoint.MIN, point.getMin()); + } + if (point.hasMax()) { + size += MarshalerUtil.sizeDoubleOptional(ExponentialHistogramDataPoint.MAX, point.getMax()); + } + size += MarshalerUtil.sizeSInt32(ExponentialHistogramDataPoint.SCALE, point.getScale()); + size += + 
MarshalerUtil.sizeFixed64(ExponentialHistogramDataPoint.ZERO_COUNT, point.getZeroCount()); + size += + StatelessMarshalerUtil.sizeMessageWithContext( + ExponentialHistogramDataPoint.POSITIVE, + point.getPositiveBuckets(), + ExponentialHistogramBucketsStatelessMarshaler.INSTANCE, + context); + size += + StatelessMarshalerUtil.sizeMessageWithContext( + ExponentialHistogramDataPoint.NEGATIVE, + point.getNegativeBuckets(), + ExponentialHistogramBucketsStatelessMarshaler.INSTANCE, + context); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ExponentialHistogramDataPoint.EXEMPLARS, + point.getExemplars(), + ExemplarStatelessMarshaler.INSTANCE, + context); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ExponentialHistogramDataPoint.ATTRIBUTES, + point.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramStatelessMarshaler.java new file mode 100644 index 00000000000..240f616a113 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ExponentialHistogramStatelessMarshaler.java @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.metrics.v1.internal.ExponentialHistogram; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramData; +import java.io.IOException; + +/** See {@link ExponentialHistogramMarshaler}. 
*/ +final class ExponentialHistogramStatelessMarshaler + implements StatelessMarshaler { + static final ExponentialHistogramStatelessMarshaler INSTANCE = + new ExponentialHistogramStatelessMarshaler(); + private static final MarshalerContext.Key DATA_POINT_SIZE_CALCULATOR_KEY = MarshalerContext.key(); + private static final MarshalerContext.Key DATA_POINT_WRITER_KEY = MarshalerContext.key(); + + private ExponentialHistogramStatelessMarshaler() {} + + @Override + public void writeTo( + Serializer output, ExponentialHistogramData histogram, MarshalerContext context) + throws IOException { + output.serializeRepeatedMessageWithContext( + ExponentialHistogram.DATA_POINTS, + histogram.getPoints(), + ExponentialHistogramDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_WRITER_KEY); + output.serializeEnum( + ExponentialHistogram.AGGREGATION_TEMPORALITY, + MetricsMarshalerUtil.mapToTemporality(histogram.getAggregationTemporality())); + } + + @Override + public int getBinarySerializedSize(ExponentialHistogramData histogram, MarshalerContext context) { + int size = 0; + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ExponentialHistogram.DATA_POINTS, + histogram.getPoints(), + ExponentialHistogramDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_SIZE_CALCULATOR_KEY); + size += + MarshalerUtil.sizeEnum( + ExponentialHistogram.AGGREGATION_TEMPORALITY, + MetricsMarshalerUtil.mapToTemporality(histogram.getAggregationTemporality())); + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/GaugeStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/GaugeStatelessMarshaler.java new file mode 100644 index 00000000000..4db29e74e1c --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/GaugeStatelessMarshaler.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.metrics.v1.internal.Gauge; +import io.opentelemetry.sdk.metrics.data.GaugeData; +import io.opentelemetry.sdk.metrics.data.PointData; +import java.io.IOException; + +/** See {@link GaugeMarshaler}. 
*/ +final class GaugeStatelessMarshaler implements StatelessMarshaler> { + static final GaugeStatelessMarshaler INSTANCE = new GaugeStatelessMarshaler(); + private static final MarshalerContext.Key DATA_POINT_SIZE_CALCULATOR_KEY = MarshalerContext.key(); + private static final MarshalerContext.Key DATA_POINT_WRITER_KEY = MarshalerContext.key(); + + private GaugeStatelessMarshaler() {} + + @Override + public void writeTo( + Serializer output, GaugeData gauge, MarshalerContext context) + throws IOException { + output.serializeRepeatedMessageWithContext( + Gauge.DATA_POINTS, + gauge.getPoints(), + NumberDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_WRITER_KEY); + } + + @Override + public int getBinarySerializedSize( + GaugeData gauge, MarshalerContext context) { + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + Gauge.DATA_POINTS, + gauge.getPoints(), + NumberDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_SIZE_CALCULATOR_KEY); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/HistogramDataPointMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/HistogramDataPointMarshaler.java index b1a027a3455..8022635e868 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/HistogramDataPointMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/HistogramDataPointMarshaler.java @@ -41,7 +41,7 @@ static HistogramDataPointMarshaler[] createRepeated(Collection { + static final HistogramDataPointStatelessMarshaler INSTANCE = + new HistogramDataPointStatelessMarshaler(); + + private HistogramDataPointStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, HistogramPointData point, MarshalerContext context) + throws IOException { + output.serializeFixed64(HistogramDataPoint.START_TIME_UNIX_NANO, point.getStartEpochNanos()); + output.serializeFixed64(HistogramDataPoint.TIME_UNIX_NANO, point.getEpochNanos()); + output.serializeFixed64(HistogramDataPoint.COUNT, point.getCount()); + output.serializeDoubleOptional(HistogramDataPoint.SUM, point.getSum()); + if (point.hasMin()) { + output.serializeDoubleOptional(HistogramDataPoint.MIN, point.getMin()); + } + if (point.hasMax()) { + output.serializeDoubleOptional(HistogramDataPoint.MAX, point.getMax()); + } + output.serializeRepeatedFixed64(HistogramDataPoint.BUCKET_COUNTS, point.getCounts()); + output.serializeRepeatedDouble(HistogramDataPoint.EXPLICIT_BOUNDS, point.getBoundaries()); + output.serializeRepeatedMessageWithContext( + HistogramDataPoint.EXEMPLARS, + point.getExemplars(), + ExemplarStatelessMarshaler.INSTANCE, + context); + output.serializeRepeatedMessageWithContext( + HistogramDataPoint.ATTRIBUTES, + point.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + } + + @Override + public int getBinarySerializedSize(HistogramPointData point, MarshalerContext context) { + int size = 0; + size += + MarshalerUtil.sizeFixed64( + HistogramDataPoint.START_TIME_UNIX_NANO, point.getStartEpochNanos()); + size += MarshalerUtil.sizeFixed64(HistogramDataPoint.TIME_UNIX_NANO, point.getEpochNanos()); + size += MarshalerUtil.sizeFixed64(HistogramDataPoint.COUNT, point.getCount()); + size += MarshalerUtil.sizeDoubleOptional(HistogramDataPoint.SUM, point.getSum()); + if (point.hasMin()) { + size += MarshalerUtil.sizeDoubleOptional(HistogramDataPoint.MIN, point.getMin()); + } + if 
(point.hasMax()) { + size += MarshalerUtil.sizeDoubleOptional(HistogramDataPoint.MAX, point.getMax()); + } + size += MarshalerUtil.sizeRepeatedFixed64(HistogramDataPoint.BUCKET_COUNTS, point.getCounts()); + size += + MarshalerUtil.sizeRepeatedDouble(HistogramDataPoint.EXPLICIT_BOUNDS, point.getBoundaries()); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + HistogramDataPoint.EXEMPLARS, + point.getExemplars(), + ExemplarStatelessMarshaler.INSTANCE, + context); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + HistogramDataPoint.ATTRIBUTES, + point.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/HistogramStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/HistogramStatelessMarshaler.java new file mode 100644 index 00000000000..2e3f65e188a --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/HistogramStatelessMarshaler.java @@ -0,0 +1,55 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.metrics.v1.internal.Histogram; +import io.opentelemetry.sdk.metrics.data.HistogramData; +import java.io.IOException; + +/** See {@link HistogramMarshaler}. 
*/ +final class HistogramStatelessMarshaler implements StatelessMarshaler { + static final HistogramStatelessMarshaler INSTANCE = new HistogramStatelessMarshaler(); + private static final MarshalerContext.Key DATA_POINT_SIZE_CALCULATOR_KEY = MarshalerContext.key(); + private static final MarshalerContext.Key DATA_POINT_WRITER_KEY = MarshalerContext.key(); + + private HistogramStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, HistogramData histogram, MarshalerContext context) + throws IOException { + output.serializeRepeatedMessageWithContext( + Histogram.DATA_POINTS, + histogram.getPoints(), + HistogramDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_WRITER_KEY); + output.serializeEnum( + Histogram.AGGREGATION_TEMPORALITY, + MetricsMarshalerUtil.mapToTemporality(histogram.getAggregationTemporality())); + } + + @Override + public int getBinarySerializedSize(HistogramData histogram, MarshalerContext context) { + int size = 0; + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + Histogram.DATA_POINTS, + histogram.getPoints(), + HistogramDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_SIZE_CALCULATOR_KEY); + size += + MarshalerUtil.sizeEnum( + Histogram.AGGREGATION_TEMPORALITY, + MetricsMarshalerUtil.mapToTemporality(histogram.getAggregationTemporality())); + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/InstrumentationScopeMetricsStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/InstrumentationScopeMetricsStatelessMarshaler.java new file mode 100644 index 00000000000..1be87dbdd1e --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/InstrumentationScopeMetricsStatelessMarshaler.java @@ -0,0 +1,66 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler2; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.InstrumentationScopeMarshaler; +import io.opentelemetry.proto.metrics.v1.internal.ScopeMetrics; +import io.opentelemetry.proto.trace.v1.internal.ScopeSpans; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.data.MetricData; +import java.io.IOException; +import java.util.List; + +/** See {@link InstrumentationScopeMetricsMarshaler}. 
*/ +final class InstrumentationScopeMetricsStatelessMarshaler + implements StatelessMarshaler2> { + static final InstrumentationScopeMetricsStatelessMarshaler INSTANCE = + new InstrumentationScopeMetricsStatelessMarshaler(); + + private InstrumentationScopeMetricsStatelessMarshaler() {} + + @Override + public void writeTo( + Serializer output, + InstrumentationScopeInfo instrumentationScope, + List metrics, + MarshalerContext context) + throws IOException { + InstrumentationScopeMarshaler instrumentationScopeMarshaler = + context.getData(InstrumentationScopeMarshaler.class); + + output.serializeMessage(ScopeMetrics.SCOPE, instrumentationScopeMarshaler); + output.serializeRepeatedMessageWithContext( + ScopeMetrics.METRICS, metrics, MetricStatelessMarshaler.INSTANCE, context); + output.serializeStringWithContext( + ScopeMetrics.SCHEMA_URL, instrumentationScope.getSchemaUrl(), context); + } + + @Override + public int getBinarySerializedSize( + InstrumentationScopeInfo instrumentationScope, + List metrics, + MarshalerContext context) { + InstrumentationScopeMarshaler instrumentationScopeMarshaler = + InstrumentationScopeMarshaler.create(instrumentationScope); + context.addData(instrumentationScopeMarshaler); + + int size = 0; + size += MarshalerUtil.sizeMessage(ScopeMetrics.SCOPE, instrumentationScopeMarshaler); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ScopeMetrics.METRICS, metrics, MetricStatelessMarshaler.INSTANCE, context); + size += + StatelessMarshalerUtil.sizeStringWithContext( + ScopeSpans.SCHEMA_URL, instrumentationScope.getSchemaUrl(), context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/LowAllocationMetricsRequestMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/LowAllocationMetricsRequestMarshaler.java new file mode 100644 index 00000000000..704642dc35a --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/LowAllocationMetricsRequestMarshaler.java @@ -0,0 +1,107 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.collector.metrics.v1.internal.ExportMetricsServiceRequest; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.resources.Resource; +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * {@link Marshaler} to convert SDK {@link MetricData} to OTLP ExportMetricsServiceRequest. See + * {@link MetricsRequestMarshaler}. + * + *

<p>Example usage:
+ *
+ * <pre>{@code
+ * void marshal(LowAllocationMetricsRequestMarshaler requestMarshaler, OutputStream output,
+ *     Collection<MetricData> metricDataList) throws IOException {
+ *   requestMarshaler.initialize(metricDataList);
+ *   try {
+ *     requestMarshaler.writeBinaryTo(output);
+ *   } finally {
+ *     requestMarshaler.reset();
+ *   }
+ * }
+ * }</pre>
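+ *
+ * <p>For debugging, the initialized request can also be written out as JSON; a sketch, assuming
+ * the {@code writeJsonTo} method inherited from {@link Marshaler} and the same {@code output}
+ * and {@code metricDataList} as above:
+ *
+ * <pre>{@code
+ * requestMarshaler.initialize(metricDataList);
+ * try {
+ *   requestMarshaler.writeJsonTo(output);
+ * } finally {
+ *   requestMarshaler.reset();
+ * }
+ * }</pre>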
+ * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class LowAllocationMetricsRequestMarshaler extends Marshaler { + private static final MarshalerContext.Key RESOURCE_METRIC_SIZE_CALCULATOR_KEY = + MarshalerContext.key(); + private static final MarshalerContext.Key RESOURCE_METRIC_WRITER_KEY = MarshalerContext.key(); + + private final MarshalerContext context = new MarshalerContext(); + + @SuppressWarnings("NullAway") + private Map>> resourceAndScopeMap; + + private int size; + + public void initialize(Collection metricDataList) { + resourceAndScopeMap = groupByResourceAndScope(context, metricDataList); + size = calculateSize(context, resourceAndScopeMap); + } + + public void reset() { + context.reset(); + } + + @Override + public int getBinarySerializedSize() { + return size; + } + + @Override + public void writeTo(Serializer output) throws IOException { + // serializing can be retried, reset the indexes, so we could call writeTo multiple times + context.resetReadIndex(); + output.serializeRepeatedMessageWithContext( + ExportMetricsServiceRequest.RESOURCE_METRICS, + resourceAndScopeMap, + ResourceMetricsStatelessMarshaler.INSTANCE, + context, + RESOURCE_METRIC_WRITER_KEY); + } + + private static int calculateSize( + MarshalerContext context, + Map>> resourceAndScopeMap) { + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ExportMetricsServiceRequest.RESOURCE_METRICS, + resourceAndScopeMap, + ResourceMetricsStatelessMarshaler.INSTANCE, + context, + RESOURCE_METRIC_SIZE_CALCULATOR_KEY); + } + + private static Map>> + groupByResourceAndScope(MarshalerContext context, Collection metricDataList) { + + if (metricDataList.isEmpty()) { + return Collections.emptyMap(); + } + + return StatelessMarshalerUtil.groupByResourceAndScope( + metricDataList, + // TODO(anuraaga): Replace with an internal SdkData type of interface that exposes these + // two. + MetricData::getResource, + MetricData::getInstrumentationScopeInfo, + context); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricReusableDataMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricReusableDataMarshaler.java new file mode 100644 index 00000000000..a3d7187698b --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricReusableDataMarshaler.java @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.data.MetricData; +import java.util.Collection; +import java.util.Deque; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.function.BiFunction; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. 
+ */ +public class MetricReusableDataMarshaler { + + private final Deque marshalerPool = + new ConcurrentLinkedDeque<>(); + + private final MemoryMode memoryMode; + private final BiFunction doExport; + + public MetricReusableDataMarshaler( + MemoryMode memoryMode, BiFunction doExport) { + this.memoryMode = memoryMode; + this.doExport = doExport; + } + + public MemoryMode getMemoryMode() { + return memoryMode; + } + + public CompletableResultCode export(Collection metrics) { + if (memoryMode == MemoryMode.REUSABLE_DATA) { + LowAllocationMetricsRequestMarshaler marshaler = marshalerPool.poll(); + if (marshaler == null) { + marshaler = new LowAllocationMetricsRequestMarshaler(); + } + LowAllocationMetricsRequestMarshaler exportMarshaler = marshaler; + exportMarshaler.initialize(metrics); + return doExport + .apply(exportMarshaler, metrics.size()) + .whenComplete( + () -> { + exportMarshaler.reset(); + marshalerPool.add(exportMarshaler); + }); + } + // MemoryMode == MemoryMode.IMMUTABLE_DATA + MetricsRequestMarshaler request = MetricsRequestMarshaler.create(metrics); + return doExport.apply(request, metrics.size()); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricStatelessMarshaler.java new file mode 100644 index 00000000000..4a2868ebe2c --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricStatelessMarshaler.java @@ -0,0 +1,212 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import static io.opentelemetry.sdk.metrics.data.MetricDataType.DOUBLE_GAUGE; +import static io.opentelemetry.sdk.metrics.data.MetricDataType.DOUBLE_SUM; +import static io.opentelemetry.sdk.metrics.data.MetricDataType.EXPONENTIAL_HISTOGRAM; +import static io.opentelemetry.sdk.metrics.data.MetricDataType.HISTOGRAM; +import static io.opentelemetry.sdk.metrics.data.MetricDataType.LONG_GAUGE; +import static io.opentelemetry.sdk.metrics.data.MetricDataType.LONG_SUM; +import static io.opentelemetry.sdk.metrics.data.MetricDataType.SUMMARY; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.metrics.v1.internal.Metric; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.MetricDataType; +import java.io.IOException; +import java.util.EnumMap; +import java.util.Map; + +/** See {@link MetricMarshaler}. 
*/ +final class MetricStatelessMarshaler implements StatelessMarshaler { + static final MetricStatelessMarshaler INSTANCE = new MetricStatelessMarshaler(); + private static final Map> METRIC_MARSHALERS = + new EnumMap<>(MetricDataType.class); + + static { + METRIC_MARSHALERS.put( + LONG_GAUGE, + new StatelessMarshaler() { + @Override + public int getBinarySerializedSize(MetricData metricData, MarshalerContext context) { + return StatelessMarshalerUtil.sizeMessageWithContext( + Metric.GAUGE, + metricData.getLongGaugeData(), + GaugeStatelessMarshaler.INSTANCE, + context); + } + + @Override + public void writeTo(Serializer output, MetricData metric, MarshalerContext context) + throws IOException { + output.serializeMessageWithContext( + Metric.GAUGE, metric.getLongGaugeData(), GaugeStatelessMarshaler.INSTANCE, context); + } + }); + METRIC_MARSHALERS.put( + DOUBLE_GAUGE, + new StatelessMarshaler() { + @Override + public int getBinarySerializedSize(MetricData metricData, MarshalerContext context) { + return StatelessMarshalerUtil.sizeMessageWithContext( + Metric.GAUGE, + metricData.getDoubleGaugeData(), + GaugeStatelessMarshaler.INSTANCE, + context); + } + + @Override + public void writeTo(Serializer output, MetricData metric, MarshalerContext context) + throws IOException { + output.serializeMessageWithContext( + Metric.GAUGE, + metric.getDoubleGaugeData(), + GaugeStatelessMarshaler.INSTANCE, + context); + } + }); + METRIC_MARSHALERS.put( + LONG_SUM, + new StatelessMarshaler() { + @Override + public int getBinarySerializedSize(MetricData metricData, MarshalerContext context) { + return StatelessMarshalerUtil.sizeMessageWithContext( + Metric.SUM, metricData.getLongSumData(), SumStatelessMarshaler.INSTANCE, context); + } + + @Override + public void writeTo(Serializer output, MetricData metric, MarshalerContext context) + throws IOException { + output.serializeMessageWithContext( + Metric.SUM, metric.getLongSumData(), SumStatelessMarshaler.INSTANCE, context); + } + }); + METRIC_MARSHALERS.put( + DOUBLE_SUM, + new StatelessMarshaler() { + @Override + public int getBinarySerializedSize(MetricData metricData, MarshalerContext context) { + return StatelessMarshalerUtil.sizeMessageWithContext( + Metric.SUM, metricData.getDoubleSumData(), SumStatelessMarshaler.INSTANCE, context); + } + + @Override + public void writeTo(Serializer output, MetricData metric, MarshalerContext context) + throws IOException { + output.serializeMessageWithContext( + Metric.SUM, metric.getDoubleSumData(), SumStatelessMarshaler.INSTANCE, context); + } + }); + METRIC_MARSHALERS.put( + SUMMARY, + new StatelessMarshaler() { + @Override + public int getBinarySerializedSize(MetricData metricData, MarshalerContext context) { + return StatelessMarshalerUtil.sizeMessageWithContext( + Metric.SUMMARY, + metricData.getSummaryData(), + SummaryStatelessMarshaler.INSTANCE, + context); + } + + @Override + public void writeTo(Serializer output, MetricData metric, MarshalerContext context) + throws IOException { + output.serializeMessageWithContext( + Metric.SUMMARY, + metric.getSummaryData(), + SummaryStatelessMarshaler.INSTANCE, + context); + } + }); + METRIC_MARSHALERS.put( + HISTOGRAM, + new StatelessMarshaler() { + @Override + public int getBinarySerializedSize(MetricData metricData, MarshalerContext context) { + return StatelessMarshalerUtil.sizeMessageWithContext( + Metric.HISTOGRAM, + metricData.getHistogramData(), + HistogramStatelessMarshaler.INSTANCE, + context); + } + + @Override + public void writeTo(Serializer output, MetricData 
metric, MarshalerContext context) + throws IOException { + output.serializeMessageWithContext( + Metric.HISTOGRAM, + metric.getHistogramData(), + HistogramStatelessMarshaler.INSTANCE, + context); + } + }); + METRIC_MARSHALERS.put( + EXPONENTIAL_HISTOGRAM, + new StatelessMarshaler() { + @Override + public int getBinarySerializedSize(MetricData metricData, MarshalerContext context) { + return StatelessMarshalerUtil.sizeMessageWithContext( + Metric.EXPONENTIAL_HISTOGRAM, + metricData.getExponentialHistogramData(), + ExponentialHistogramStatelessMarshaler.INSTANCE, + context); + } + + @Override + public void writeTo(Serializer output, MetricData metric, MarshalerContext context) + throws IOException { + output.serializeMessageWithContext( + Metric.EXPONENTIAL_HISTOGRAM, + metric.getExponentialHistogramData(), + ExponentialHistogramStatelessMarshaler.INSTANCE, + context); + } + }); + } + + private MetricStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, MetricData metric, MarshalerContext context) + throws IOException { + StatelessMarshaler metricMarshaler = METRIC_MARSHALERS.get(metric.getType()); + if (metricMarshaler == null) { + // Someone not using BOM to align versions as we require. Just skip the metric. + return; + } + + output.serializeStringWithContext(Metric.NAME, metric.getName(), context); + output.serializeStringWithContext(Metric.DESCRIPTION, metric.getDescription(), context); + output.serializeStringWithContext(Metric.UNIT, metric.getUnit(), context); + + metricMarshaler.writeTo(output, metric, context); + } + + @Override + public int getBinarySerializedSize(MetricData metric, MarshalerContext context) { + StatelessMarshaler metricMarshaler = METRIC_MARSHALERS.get(metric.getType()); + if (metricMarshaler == null) { + // Someone not using BOM to align versions as we require. Just skip the metric. 
+ return 0; + } + + int size = 0; + size += StatelessMarshalerUtil.sizeStringWithContext(Metric.NAME, metric.getName(), context); + size += + StatelessMarshalerUtil.sizeStringWithContext( + Metric.DESCRIPTION, metric.getDescription(), context); + size += StatelessMarshalerUtil.sizeStringWithContext(Metric.UNIT, metric.getUnit(), context); + + size += metricMarshaler.getBinarySerializedSize(metric, context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/NumberDataPointMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/NumberDataPointMarshaler.java index 3230f64873c..7edd3a76a8f 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/NumberDataPointMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/NumberDataPointMarshaler.java @@ -40,21 +40,13 @@ static NumberDataPointMarshaler[] createRepeated(Collection static NumberDataPointMarshaler create(PointData point) { ExemplarMarshaler[] exemplarMarshalers = ExemplarMarshaler.createRepeated(point.getExemplars()); KeyValueMarshaler[] attributeMarshalers = - KeyValueMarshaler.createRepeated(point.getAttributes()); - - ProtoFieldInfo valueField; - if (point instanceof LongPointData) { - valueField = NumberDataPoint.AS_INT; - } else { - assert point instanceof DoublePointData; - valueField = NumberDataPoint.AS_DOUBLE; - } + KeyValueMarshaler.createForAttributes(point.getAttributes()); return new NumberDataPointMarshaler( point.getStartEpochNanos(), point.getEpochNanos(), point, - valueField, + toProtoPointValueType(point), exemplarMarshalers, attributeMarshalers); } @@ -107,4 +99,13 @@ private static int calculateSize( size += MarshalerUtil.sizeRepeatedMessage(NumberDataPoint.ATTRIBUTES, attributes); return size; } + + static ProtoFieldInfo toProtoPointValueType(PointData pointData) { + if (pointData instanceof LongPointData) { + return NumberDataPoint.AS_INT; + } else { + assert pointData instanceof DoublePointData; + return NumberDataPoint.AS_DOUBLE; + } + } } diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/NumberDataPointStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/NumberDataPointStatelessMarshaler.java new file mode 100644 index 00000000000..c907d59a7e7 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/NumberDataPointStatelessMarshaler.java @@ -0,0 +1,78 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import static io.opentelemetry.exporter.internal.otlp.metrics.NumberDataPointMarshaler.toProtoPointValueType; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.ProtoFieldInfo; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValueStatelessMarshaler; +import io.opentelemetry.proto.metrics.v1.internal.NumberDataPoint; +import io.opentelemetry.sdk.metrics.data.DoublePointData; +import 
io.opentelemetry.sdk.metrics.data.LongPointData; +import io.opentelemetry.sdk.metrics.data.PointData; +import java.io.IOException; + +/** See {@link NumberDataPointMarshaler}. */ +final class NumberDataPointStatelessMarshaler implements StatelessMarshaler { + static final NumberDataPointStatelessMarshaler INSTANCE = new NumberDataPointStatelessMarshaler(); + + private NumberDataPointStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, PointData point, MarshalerContext context) + throws IOException { + output.serializeFixed64(NumberDataPoint.START_TIME_UNIX_NANO, point.getStartEpochNanos()); + output.serializeFixed64(NumberDataPoint.TIME_UNIX_NANO, point.getEpochNanos()); + ProtoFieldInfo valueField = toProtoPointValueType(point); + if (valueField == NumberDataPoint.AS_INT) { + output.serializeFixed64Optional(valueField, ((LongPointData) point).getValue()); + } else { + output.serializeDoubleOptional(valueField, ((DoublePointData) point).getValue()); + } + output.serializeRepeatedMessageWithContext( + NumberDataPoint.EXEMPLARS, + point.getExemplars(), + ExemplarStatelessMarshaler.INSTANCE, + context); + output.serializeRepeatedMessageWithContext( + NumberDataPoint.ATTRIBUTES, + point.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + } + + @Override + public int getBinarySerializedSize(PointData point, MarshalerContext context) { + int size = 0; + size += + MarshalerUtil.sizeFixed64(NumberDataPoint.START_TIME_UNIX_NANO, point.getStartEpochNanos()); + size += MarshalerUtil.sizeFixed64(NumberDataPoint.TIME_UNIX_NANO, point.getEpochNanos()); + ProtoFieldInfo valueField = toProtoPointValueType(point); + if (valueField == NumberDataPoint.AS_INT) { + size += MarshalerUtil.sizeFixed64Optional(valueField, ((LongPointData) point).getValue()); + } else { + size += MarshalerUtil.sizeDoubleOptional(valueField, ((DoublePointData) point).getValue()); + } + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + NumberDataPoint.EXEMPLARS, + point.getExemplars(), + ExemplarStatelessMarshaler.INSTANCE, + context); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + NumberDataPoint.ATTRIBUTES, + point.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ResourceMetricsStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ResourceMetricsStatelessMarshaler.java new file mode 100644 index 00000000000..e25ab330587 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ResourceMetricsStatelessMarshaler.java @@ -0,0 +1,83 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler2; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.ResourceMarshaler; +import io.opentelemetry.proto.metrics.v1.internal.ResourceMetrics; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.data.MetricData; +import 
io.opentelemetry.sdk.resources.Resource; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * A Marshaler of ResourceMetrics. See {@link ResourceMetricsMarshaler}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ResourceMetricsStatelessMarshaler + implements StatelessMarshaler2>> { + static final ResourceMetricsStatelessMarshaler INSTANCE = new ResourceMetricsStatelessMarshaler(); + private static final MarshalerContext.Key SCOPE_METRIC_WRITER_KEY = MarshalerContext.key(); + private static final MarshalerContext.Key SCOPE_METRIC_SIZE_CALCULATOR_KEY = + MarshalerContext.key(); + + private ResourceMetricsStatelessMarshaler() {} + + @Override + public void writeTo( + Serializer output, + Resource resource, + Map> scopeMap, + MarshalerContext context) + throws IOException { + ResourceMarshaler resourceMarshaler = context.getData(ResourceMarshaler.class); + output.serializeMessage(ResourceMetrics.RESOURCE, resourceMarshaler); + + output.serializeRepeatedMessageWithContext( + ResourceMetrics.SCOPE_METRICS, + scopeMap, + InstrumentationScopeMetricsStatelessMarshaler.INSTANCE, + context, + SCOPE_METRIC_WRITER_KEY); + + output.serializeStringWithContext(ResourceMetrics.SCHEMA_URL, resource.getSchemaUrl(), context); + } + + @Override + public int getBinarySerializedSize( + Resource resource, + Map> scopeMap, + MarshalerContext context) { + + int size = 0; + + ResourceMarshaler resourceMarshaler = ResourceMarshaler.create(resource); + context.addData(resourceMarshaler); + size += MarshalerUtil.sizeMessage(ResourceMetrics.RESOURCE, resourceMarshaler); + + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ResourceMetrics.SCOPE_METRICS, + scopeMap, + InstrumentationScopeMetricsStatelessMarshaler.INSTANCE, + context, + SCOPE_METRIC_SIZE_CALCULATOR_KEY); + + size += + StatelessMarshalerUtil.sizeStringWithContext( + ResourceMetrics.SCHEMA_URL, resource.getSchemaUrl(), context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SumStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SumStatelessMarshaler.java new file mode 100644 index 00000000000..9a4ce569bee --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SumStatelessMarshaler.java @@ -0,0 +1,56 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.metrics.v1.internal.Sum; +import io.opentelemetry.sdk.metrics.data.PointData; +import io.opentelemetry.sdk.metrics.data.SumData; +import java.io.IOException; + +/** See {@link SumMarshaler}. 
*/ +final class SumStatelessMarshaler implements StatelessMarshaler> { + static final SumStatelessMarshaler INSTANCE = new SumStatelessMarshaler(); + private static final MarshalerContext.Key DATA_POINT_SIZE_CALCULATOR_KEY = MarshalerContext.key(); + private static final MarshalerContext.Key DATA_POINT_WRITER_KEY = MarshalerContext.key(); + + @Override + public void writeTo(Serializer output, SumData sum, MarshalerContext context) + throws IOException { + output.serializeRepeatedMessageWithContext( + Sum.DATA_POINTS, + sum.getPoints(), + NumberDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_WRITER_KEY); + output.serializeEnum( + Sum.AGGREGATION_TEMPORALITY, + MetricsMarshalerUtil.mapToTemporality(sum.getAggregationTemporality())); + output.serializeBool(Sum.IS_MONOTONIC, sum.isMonotonic()); + } + + @Override + public int getBinarySerializedSize(SumData sum, MarshalerContext context) { + int size = 0; + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + Sum.DATA_POINTS, + sum.getPoints(), + NumberDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_SIZE_CALCULATOR_KEY); + size += + MarshalerUtil.sizeEnum( + Sum.AGGREGATION_TEMPORALITY, + MetricsMarshalerUtil.mapToTemporality(sum.getAggregationTemporality())); + size += MarshalerUtil.sizeBool(Sum.IS_MONOTONIC, sum.isMonotonic()); + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryDataPointMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryDataPointMarshaler.java index cb38c86ed13..c0b6a1dcded 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryDataPointMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryDataPointMarshaler.java @@ -20,7 +20,7 @@ final class SummaryDataPointMarshaler extends MarshalerWithSize { private final long count; private final double sum; private final ValueAtQuantileMarshaler[] quantileValues; - private final KeyValueMarshaler[] attributes; + private final MarshalerWithSize[] attributes; static SummaryDataPointMarshaler[] createRepeated(Collection points) { SummaryDataPointMarshaler[] marshalers = new SummaryDataPointMarshaler[points.size()]; @@ -34,8 +34,8 @@ static SummaryDataPointMarshaler[] createRepeated(Collection p static SummaryDataPointMarshaler create(SummaryPointData point) { ValueAtQuantileMarshaler[] quantileMarshalers = ValueAtQuantileMarshaler.createRepeated(point.getValues()); - KeyValueMarshaler[] attributeMarshalers = - KeyValueMarshaler.createRepeated(point.getAttributes()); + MarshalerWithSize[] attributeMarshalers = + KeyValueMarshaler.createForAttributes(point.getAttributes()); return new SummaryDataPointMarshaler( point.getStartEpochNanos(), @@ -52,7 +52,7 @@ private SummaryDataPointMarshaler( long count, double sum, ValueAtQuantileMarshaler[] quantileValues, - KeyValueMarshaler[] attributes) { + MarshalerWithSize[] attributes) { super(calculateSize(startTimeUnixNano, timeUnixNano, count, sum, quantileValues, attributes)); this.startTimeUnixNano = startTimeUnixNano; this.timeUnixNano = timeUnixNano; @@ -78,7 +78,7 @@ private static int calculateSize( long count, double sum, ValueAtQuantileMarshaler[] quantileValues, - KeyValueMarshaler[] attributes) { + MarshalerWithSize[] attributes) { int size = 0; size += MarshalerUtil.sizeFixed64(SummaryDataPoint.START_TIME_UNIX_NANO, startTimeUnixNano); size += 
MarshalerUtil.sizeFixed64(SummaryDataPoint.TIME_UNIX_NANO, timeUnixNano); diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryDataPointStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryDataPointStatelessMarshaler.java new file mode 100644 index 00000000000..bcd2306bb13 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryDataPointStatelessMarshaler.java @@ -0,0 +1,67 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValueStatelessMarshaler; +import io.opentelemetry.proto.metrics.v1.internal.SummaryDataPoint; +import io.opentelemetry.sdk.metrics.data.SummaryPointData; +import java.io.IOException; + +/** See {@link SummaryDataPointMarshaler}. */ +final class SummaryDataPointStatelessMarshaler implements StatelessMarshaler { + static final SummaryDataPointStatelessMarshaler INSTANCE = + new SummaryDataPointStatelessMarshaler(); + + private SummaryDataPointStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, SummaryPointData point, MarshalerContext context) + throws IOException { + output.serializeFixed64(SummaryDataPoint.START_TIME_UNIX_NANO, point.getStartEpochNanos()); + output.serializeFixed64(SummaryDataPoint.TIME_UNIX_NANO, point.getEpochNanos()); + output.serializeFixed64(SummaryDataPoint.COUNT, point.getCount()); + output.serializeDouble(SummaryDataPoint.SUM, point.getSum()); + output.serializeRepeatedMessageWithContext( + SummaryDataPoint.QUANTILE_VALUES, + point.getValues(), + ValueAtQuantileStatelessMarshaler.INSTANCE, + context); + output.serializeRepeatedMessageWithContext( + SummaryDataPoint.ATTRIBUTES, + point.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + } + + @Override + public int getBinarySerializedSize(SummaryPointData point, MarshalerContext context) { + int size = 0; + size += + MarshalerUtil.sizeFixed64( + SummaryDataPoint.START_TIME_UNIX_NANO, point.getStartEpochNanos()); + size += MarshalerUtil.sizeFixed64(SummaryDataPoint.TIME_UNIX_NANO, point.getEpochNanos()); + size += MarshalerUtil.sizeFixed64(SummaryDataPoint.COUNT, point.getCount()); + size += MarshalerUtil.sizeDouble(SummaryDataPoint.SUM, point.getSum()); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + SummaryDataPoint.QUANTILE_VALUES, + point.getValues(), + ValueAtQuantileStatelessMarshaler.INSTANCE, + context); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + SummaryDataPoint.ATTRIBUTES, + point.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryStatelessMarshaler.java new file mode 100644 index 00000000000..b15984592b8 --- /dev/null +++ 
b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/SummaryStatelessMarshaler.java @@ -0,0 +1,44 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.metrics.v1.internal.Summary; +import io.opentelemetry.sdk.metrics.data.SummaryData; +import java.io.IOException; + +/** See {@link SummaryMarshaler}. */ +final class SummaryStatelessMarshaler implements StatelessMarshaler { + static final SummaryStatelessMarshaler INSTANCE = new SummaryStatelessMarshaler(); + private static final MarshalerContext.Key DATA_POINT_SIZE_CALCULATOR_KEY = MarshalerContext.key(); + private static final MarshalerContext.Key DATA_POINT_WRITER_KEY = MarshalerContext.key(); + + private SummaryStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, SummaryData summary, MarshalerContext context) + throws IOException { + output.serializeRepeatedMessageWithContext( + Summary.DATA_POINTS, + summary.getPoints(), + SummaryDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_WRITER_KEY); + } + + @Override + public int getBinarySerializedSize(SummaryData summary, MarshalerContext context) { + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + Summary.DATA_POINTS, + summary.getPoints(), + SummaryDataPointStatelessMarshaler.INSTANCE, + context, + DATA_POINT_SIZE_CALCULATOR_KEY); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ValueAtQuantileMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ValueAtQuantileMarshaler.java index 6f9ec38ef61..1c2e6ba09db 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ValueAtQuantileMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ValueAtQuantileMarshaler.java @@ -42,7 +42,7 @@ public void writeTo(Serializer output) throws IOException { output.serializeDouble(SummaryDataPoint.ValueAtQuantile.VALUE, value); } - private static int calculateSize(double quantile, double value) { + static int calculateSize(double quantile, double value) { int size = 0; size += MarshalerUtil.sizeDouble(SummaryDataPoint.ValueAtQuantile.QUANTILE, quantile); size += MarshalerUtil.sizeDouble(SummaryDataPoint.ValueAtQuantile.VALUE, value); diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ValueAtQuantileStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ValueAtQuantileStatelessMarshaler.java new file mode 100644 index 00000000000..ba1f0347865 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/metrics/ValueAtQuantileStatelessMarshaler.java @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import 
io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.proto.metrics.v1.internal.SummaryDataPoint; +import io.opentelemetry.sdk.metrics.data.ValueAtQuantile; +import java.io.IOException; + +/** See {@link ValueAtQuantileMarshaler}. */ +final class ValueAtQuantileStatelessMarshaler implements StatelessMarshaler { + static final ValueAtQuantileStatelessMarshaler INSTANCE = new ValueAtQuantileStatelessMarshaler(); + + private ValueAtQuantileStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, ValueAtQuantile value, MarshalerContext context) + throws IOException { + output.serializeDouble(SummaryDataPoint.ValueAtQuantile.QUANTILE, value.getQuantile()); + output.serializeDouble(SummaryDataPoint.ValueAtQuantile.VALUE, value.getValue()); + } + + @Override + public int getBinarySerializedSize(ValueAtQuantile value, MarshalerContext context) { + return ValueAtQuantileMarshaler.calculateSize(value.getQuantile(), value.getValue()); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/InstrumentationScopeSpansStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/InstrumentationScopeSpansStatelessMarshaler.java new file mode 100644 index 00000000000..decec18ea79 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/InstrumentationScopeSpansStatelessMarshaler.java @@ -0,0 +1,65 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler2; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.InstrumentationScopeMarshaler; +import io.opentelemetry.proto.trace.v1.internal.ScopeSpans; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.trace.data.SpanData; +import java.io.IOException; +import java.util.List; + +/** See {@link InstrumentationScopeSpansMarshaler}. 
*/ +final class InstrumentationScopeSpansStatelessMarshaler + implements StatelessMarshaler2> { + static final InstrumentationScopeSpansStatelessMarshaler INSTANCE = + new InstrumentationScopeSpansStatelessMarshaler(); + + private InstrumentationScopeSpansStatelessMarshaler() {} + + @Override + public void writeTo( + Serializer output, + InstrumentationScopeInfo instrumentationScope, + List spans, + MarshalerContext context) + throws IOException { + InstrumentationScopeMarshaler instrumentationScopeMarshaler = + context.getData(InstrumentationScopeMarshaler.class); + + output.serializeMessage(ScopeSpans.SCOPE, instrumentationScopeMarshaler); + output.serializeRepeatedMessageWithContext( + ScopeSpans.SPANS, spans, SpanStatelessMarshaler.INSTANCE, context); + output.serializeStringWithContext( + ScopeSpans.SCHEMA_URL, instrumentationScope.getSchemaUrl(), context); + } + + @Override + public int getBinarySerializedSize( + InstrumentationScopeInfo instrumentationScope, + List spans, + MarshalerContext context) { + InstrumentationScopeMarshaler instrumentationScopeMarshaler = + InstrumentationScopeMarshaler.create(instrumentationScope); + context.addData(instrumentationScopeMarshaler); + + int size = 0; + size += MarshalerUtil.sizeMessage(ScopeSpans.SCOPE, instrumentationScopeMarshaler); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ScopeSpans.SPANS, spans, SpanStatelessMarshaler.INSTANCE, context); + size += + StatelessMarshalerUtil.sizeStringWithContext( + ScopeSpans.SCHEMA_URL, instrumentationScope.getSchemaUrl(), context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/LowAllocationTraceRequestMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/LowAllocationTraceRequestMarshaler.java new file mode 100644 index 00000000000..5da173f3c3a --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/LowAllocationTraceRequestMarshaler.java @@ -0,0 +1,107 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.collector.trace.v1.internal.ExportTraceServiceRequest; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.data.SpanData; +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * {@link Marshaler} to convert SDK {@link SpanData} to OTLP ExportTraceServiceRequest. See {@link + * TraceRequestMarshaler}. + * + *

Example usage: + * + *

{@code
+ * void marshal(LowAllocationTraceRequestMarshaler requestMarshaler, OutputStream output,
+ *     List<SpanData> spanList) throws IOException {
+ *   requestMarshaler.initialize(spanList);
+ *   try {
+ *     requestMarshaler.writeBinaryTo(output);
+ *   } finally {
+ *     requestMarshaler.reset();
+ *   }
+ * }
+ * }
+ * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class LowAllocationTraceRequestMarshaler extends Marshaler { + private static final MarshalerContext.Key RESOURCE_SPAN_SIZE_CALCULATOR_KEY = + MarshalerContext.key(); + private static final MarshalerContext.Key RESOURCE_SPAN_WRITER_KEY = MarshalerContext.key(); + + private final MarshalerContext context = new MarshalerContext(); + + @SuppressWarnings("NullAway") + private Map>> resourceAndScopeMap; + + private int size; + + public void initialize(Collection spanDataList) { + resourceAndScopeMap = groupByResourceAndScope(context, spanDataList); + size = calculateSize(context, resourceAndScopeMap); + } + + public void reset() { + context.reset(); + } + + @Override + public int getBinarySerializedSize() { + return size; + } + + @Override + public void writeTo(Serializer output) throws IOException { + // serializing can be retried, reset the indexes, so we could call writeTo multiple times + context.resetReadIndex(); + output.serializeRepeatedMessageWithContext( + ExportTraceServiceRequest.RESOURCE_SPANS, + resourceAndScopeMap, + ResourceSpansStatelessMarshaler.INSTANCE, + context, + RESOURCE_SPAN_WRITER_KEY); + } + + private static int calculateSize( + MarshalerContext context, + Map>> resourceAndScopeMap) { + return StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ExportTraceServiceRequest.RESOURCE_SPANS, + resourceAndScopeMap, + ResourceSpansStatelessMarshaler.INSTANCE, + context, + RESOURCE_SPAN_SIZE_CALCULATOR_KEY); + } + + private static Map>> + groupByResourceAndScope(MarshalerContext context, Collection spanDataList) { + + if (spanDataList.isEmpty()) { + return Collections.emptyMap(); + } + + return StatelessMarshalerUtil.groupByResourceAndScope( + spanDataList, + // TODO(anuraaga): Replace with an internal SdkData type of interface that exposes these + // two. + SpanData::getResource, + SpanData::getInstrumentationScopeInfo, + context); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/ResourceSpansStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/ResourceSpansStatelessMarshaler.java new file mode 100644 index 00000000000..7ae30feb91d --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/ResourceSpansStatelessMarshaler.java @@ -0,0 +1,82 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler2; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.ResourceMarshaler; +import io.opentelemetry.proto.trace.v1.internal.ResourceSpans; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.data.SpanData; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * A Marshaler of ResourceSpans. See {@link ResourceSpansMarshaler}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ResourceSpansStatelessMarshaler + implements StatelessMarshaler2>> { + static final ResourceSpansStatelessMarshaler INSTANCE = new ResourceSpansStatelessMarshaler(); + private static final MarshalerContext.Key SCOPE_SPAN_WRITER_KEY = MarshalerContext.key(); + private static final MarshalerContext.Key SCOPE_SPAN_SIZE_CALCULATOR_KEY = MarshalerContext.key(); + + private ResourceSpansStatelessMarshaler() {} + + @Override + public void writeTo( + Serializer output, + Resource resource, + Map> scopeMap, + MarshalerContext context) + throws IOException { + ResourceMarshaler resourceMarshaler = context.getData(ResourceMarshaler.class); + output.serializeMessage(ResourceSpans.RESOURCE, resourceMarshaler); + + output.serializeRepeatedMessageWithContext( + ResourceSpans.SCOPE_SPANS, + scopeMap, + InstrumentationScopeSpansStatelessMarshaler.INSTANCE, + context, + SCOPE_SPAN_WRITER_KEY); + + output.serializeStringWithContext(ResourceSpans.SCHEMA_URL, resource.getSchemaUrl(), context); + } + + @Override + public int getBinarySerializedSize( + Resource resource, + Map> scopeMap, + MarshalerContext context) { + + int size = 0; + + ResourceMarshaler resourceMarshaler = ResourceMarshaler.create(resource); + context.addData(resourceMarshaler); + size += MarshalerUtil.sizeMessage(ResourceSpans.RESOURCE, resourceMarshaler); + + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + ResourceSpans.SCOPE_SPANS, + scopeMap, + InstrumentationScopeSpansStatelessMarshaler.INSTANCE, + context, + SCOPE_SPAN_SIZE_CALCULATOR_KEY); + + size += + StatelessMarshalerUtil.sizeStringWithContext( + ResourceSpans.SCHEMA_URL, resource.getSchemaUrl(), context); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanEventMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanEventMarshaler.java index 03b208f3df0..2b2cc1d7775 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanEventMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanEventMarshaler.java @@ -40,7 +40,7 @@ static SpanEventMarshaler create(EventData event) { return new SpanEventMarshaler( event.getEpochNanos(), MarshalerUtil.toBytes(event.getName()), - KeyValueMarshaler.createRepeated(event.getAttributes()), + KeyValueMarshaler.createForAttributes(event.getAttributes()), event.getTotalAttributeCount() - event.getAttributes().size()); } diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanEventStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanEventStatelessMarshaler.java new file mode 100644 index 00000000000..a1878c3099b --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanEventStatelessMarshaler.java @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import 
io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValueStatelessMarshaler; +import io.opentelemetry.proto.trace.v1.internal.Span; +import io.opentelemetry.sdk.trace.data.EventData; +import java.io.IOException; + +/** See {@link SpanEventMarshaler}. */ +final class SpanEventStatelessMarshaler implements StatelessMarshaler { + static final SpanEventStatelessMarshaler INSTANCE = new SpanEventStatelessMarshaler(); + + private SpanEventStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, EventData event, MarshalerContext context) + throws IOException { + output.serializeFixed64(Span.Event.TIME_UNIX_NANO, event.getEpochNanos()); + output.serializeStringWithContext(Span.Event.NAME, event.getName(), context); + output.serializeRepeatedMessageWithContext( + Span.Event.ATTRIBUTES, + event.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + int droppedAttributesCount = event.getTotalAttributeCount() - event.getAttributes().size(); + output.serializeUInt32(Span.Event.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + } + + @Override + public int getBinarySerializedSize(EventData event, MarshalerContext context) { + int size = 0; + size += MarshalerUtil.sizeFixed64(Span.Event.TIME_UNIX_NANO, event.getEpochNanos()); + size += StatelessMarshalerUtil.sizeStringWithContext(Span.Event.NAME, event.getName(), context); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + Span.Event.ATTRIBUTES, + event.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + int droppedAttributesCount = event.getTotalAttributeCount() - event.getAttributes().size(); + size += MarshalerUtil.sizeUInt32(Span.Event.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanFlags.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanFlags.java new file mode 100644 index 00000000000..571be523492 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanFlags.java @@ -0,0 +1,98 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import io.opentelemetry.api.trace.TraceFlags; + +/** + * Represents the 32 bit span flags as + * specified in the proto definition. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class SpanFlags { + // As defined at: + // https://github.com/open-telemetry/opentelemetry-proto/blob/342e1d4c3a1fe43312823ffb53bd38327f263059/opentelemetry/proto/trace/v1/trace.proto#L351-L352 + static final int CONTEXT_HAS_IS_REMOTE_BIT = 0x00000100; + static final int CONTEXT_IS_REMOTE_BIT = 0x00000200; + static final int CONTEXT_IS_REMOTE_MASK = CONTEXT_HAS_IS_REMOTE_BIT | CONTEXT_IS_REMOTE_BIT; + + private SpanFlags() {} + + /** + * Returns the int (fixed32) representation of the {@link TraceFlags} enriched with the flags + * indicating a remote parent. + * + * @param isParentRemote indicates whether the parent context is remote + * @return the int (fixed32) representation of the {@link TraceFlags} enriched with the flags + * indicating a remote parent. + */ + public static int withParentIsRemoteFlags(TraceFlags traceFlags, boolean isParentRemote) { + byte byteRep = traceFlags.asByte(); + if (isParentRemote) { + return (byteRep & 0xff) | CONTEXT_IS_REMOTE_MASK; + } + return (byteRep & 0xff) | CONTEXT_HAS_IS_REMOTE_BIT; + } + + /** + * Returns the int (fixed32) representation of the 4-byte flags with the + * has_parent_context_is_remote flag bit on. + * + * @return the int (fixed32) representation of the 4-byte flags with the + * has_parent_context_is_remote flag bit on. + */ + public static int getHasParentIsRemoteMask() { + return CONTEXT_HAS_IS_REMOTE_BIT; + } + + /** + * Checks whether the given flags contain information about whether the parent context is + * remote. + * + * @param flags The int representation of the 32 bit span flags field defined in proto. + * @return True if the given flags contain information about the span's parent context being + * remote, otherwise false. + */ + public static boolean isKnownWhetherParentIsRemote(int flags) { + return (flags & CONTEXT_HAS_IS_REMOTE_BIT) != 0; + } + + /** + * Returns the int (fixed32) representation of the 4-byte flags with the + * has_parent_context_is_remote and parent_context_is_remote flag bits on. + * + * @return the int (fixed32) representation of the 4-byte flags with the + * has_parent_context_is_remote and parent_context_is_remote flag bits on. + */ + public static int getParentIsRemoteMask() { + return CONTEXT_IS_REMOTE_MASK; + } + + /** + * Checks whether the given flags mark the parent context as remote. + * + * @param flags The int representation of the 32 bit span flags field defined in proto. + * @return True if the given flags contain information about the span's parent context and the + * parent is marked as remote, otherwise false. + */ + public static boolean isParentRemote(int flags) { + return (flags & CONTEXT_IS_REMOTE_MASK) == CONTEXT_IS_REMOTE_MASK; + } + + /** + * Returns the W3C {@link TraceFlags} (least significant 8 bits) portion of the given 32 bit + * span flags field. + * + * @param flags The int representation of the 32 bit span flags field defined in proto. + * @return the W3C {@link TraceFlags} (least significant 8 bits) portion of the given 32 bit + * span flags field. 
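A minimal worked example of the span-flags bit layout implemented above (illustration only, not part of this change; it assumes access to the internal SpanFlags class and the stable TraceFlags API, and the wrapper class name is hypothetical):

  import io.opentelemetry.api.trace.TraceFlags;
  import io.opentelemetry.exporter.internal.otlp.traces.SpanFlags;

  class SpanFlagsExample {
    public static void main(String[] args) {
      // A sampled span (W3C trace flags 0x01) whose parent context is remote encodes as 0x00000301:
      // 0x001 (sampled) | 0x100 (has_parent_context_is_remote) | 0x200 (parent_context_is_remote).
      int flags = SpanFlags.withParentIsRemoteFlags(TraceFlags.getSampled(), /* isParentRemote= */ true);
      System.out.println(Integer.toHexString(flags)); // prints "301"
      System.out.println(SpanFlags.isKnownWhetherParentIsRemote(flags)); // prints "true"
      System.out.println(SpanFlags.isParentRemote(flags)); // prints "true"
      System.out.println(SpanFlags.getTraceFlags(flags).asHex()); // prints "01", the W3C flags portion
    }
  }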
+ */ + public static TraceFlags getTraceFlags(int flags) { + return TraceFlags.fromByte((byte) (flags & 0xff)); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanLinkMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanLinkMarshaler.java index 0579fb56e10..86db5e2c983 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanLinkMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanLinkMarshaler.java @@ -5,8 +5,9 @@ package io.opentelemetry.exporter.internal.otlp.traces; -import static io.opentelemetry.api.trace.propagation.internal.W3CTraceContextEncoding.encodeTraceState; +import static io.opentelemetry.exporter.internal.otlp.traces.SpanMarshaler.encodeTraceState; +import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; @@ -15,17 +16,18 @@ import io.opentelemetry.proto.trace.v1.internal.Span; import io.opentelemetry.sdk.trace.data.LinkData; import java.io.IOException; -import java.nio.charset.StandardCharsets; import java.util.List; final class SpanLinkMarshaler extends MarshalerWithSize { private static final SpanLinkMarshaler[] EMPTY = new SpanLinkMarshaler[0]; - private static final byte[] EMPTY_BYTES = new byte[0]; + private final String traceId; private final String spanId; private final byte[] traceStateUtf8; private final KeyValueMarshaler[] attributeMarshalers; private final int droppedAttributesCount; + private final TraceFlags traceFlags; + private final boolean isLinkContextRemote; static SpanLinkMarshaler[] createRepeated(List links) { if (links.isEmpty()) { @@ -43,33 +45,42 @@ static SpanLinkMarshaler[] createRepeated(List links) { // Visible for testing static SpanLinkMarshaler create(LinkData link) { - TraceState traceState = link.getSpanContext().getTraceState(); - byte[] traceStateUtf8 = - traceState.isEmpty() - ? 
EMPTY_BYTES - : encodeTraceState(traceState).getBytes(StandardCharsets.UTF_8); + byte[] traceStateUtf8 = encodeSpanLinkTraceState(link); + return new SpanLinkMarshaler( link.getSpanContext().getTraceId(), link.getSpanContext().getSpanId(), + link.getSpanContext().getTraceFlags(), traceStateUtf8, - KeyValueMarshaler.createRepeated(link.getAttributes()), - link.getTotalAttributeCount() - link.getAttributes().size()); + KeyValueMarshaler.createForAttributes(link.getAttributes()), + link.getTotalAttributeCount() - link.getAttributes().size(), + link.getSpanContext().isRemote()); } private SpanLinkMarshaler( String traceId, String spanId, + TraceFlags traceFlags, byte[] traceStateUtf8, KeyValueMarshaler[] attributeMarshalers, - int droppedAttributesCount) { + int droppedAttributesCount, + boolean isLinkContextRemote) { super( calculateSize( - traceId, spanId, traceStateUtf8, attributeMarshalers, droppedAttributesCount)); + traceId, + spanId, + traceFlags, + traceStateUtf8, + attributeMarshalers, + droppedAttributesCount, + isLinkContextRemote)); this.traceId = traceId; this.spanId = spanId; + this.traceFlags = traceFlags; this.traceStateUtf8 = traceStateUtf8; this.attributeMarshalers = attributeMarshalers; this.droppedAttributesCount = droppedAttributesCount; + this.isLinkContextRemote = isLinkContextRemote; } @Override @@ -79,20 +90,32 @@ public void writeTo(Serializer output) throws IOException { output.serializeString(Span.Link.TRACE_STATE, traceStateUtf8); output.serializeRepeatedMessage(Span.Link.ATTRIBUTES, attributeMarshalers); output.serializeUInt32(Span.Link.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + output.serializeFixed32( + Span.Link.FLAGS, SpanFlags.withParentIsRemoteFlags(traceFlags, isLinkContextRemote)); } private static int calculateSize( String traceId, String spanId, + TraceFlags flags, byte[] traceStateUtf8, KeyValueMarshaler[] attributeMarshalers, - int droppedAttributesCount) { + int droppedAttributesCount, + boolean isLinkContextRemote) { int size = 0; size += MarshalerUtil.sizeTraceId(Span.Link.TRACE_ID, traceId); size += MarshalerUtil.sizeSpanId(Span.Link.SPAN_ID, spanId); size += MarshalerUtil.sizeBytes(Span.Link.TRACE_STATE, traceStateUtf8); size += MarshalerUtil.sizeRepeatedMessage(Span.Link.ATTRIBUTES, attributeMarshalers); size += MarshalerUtil.sizeUInt32(Span.Link.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + size += + MarshalerUtil.sizeFixed32( + Span.Link.FLAGS, SpanFlags.withParentIsRemoteFlags(flags, isLinkContextRemote)); return size; } + + static byte[] encodeSpanLinkTraceState(LinkData link) { + TraceState traceState = link.getSpanContext().getTraceState(); + return encodeTraceState(traceState); + } } diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanLinkStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanLinkStatelessMarshaler.java new file mode 100644 index 00000000000..86d3aef37fb --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanLinkStatelessMarshaler.java @@ -0,0 +1,70 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import static io.opentelemetry.exporter.internal.otlp.traces.SpanLinkMarshaler.encodeSpanLinkTraceState; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import 
io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValueStatelessMarshaler; +import io.opentelemetry.proto.trace.v1.internal.Span; +import io.opentelemetry.sdk.trace.data.LinkData; +import java.io.IOException; + +/** See {@link SpanLinkMarshaler}. */ +final class SpanLinkStatelessMarshaler implements StatelessMarshaler { + static final SpanLinkStatelessMarshaler INSTANCE = new SpanLinkStatelessMarshaler(); + + private SpanLinkStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, LinkData link, MarshalerContext context) + throws IOException { + output.serializeTraceId(Span.Link.TRACE_ID, link.getSpanContext().getTraceId(), context); + output.serializeSpanId(Span.Link.SPAN_ID, link.getSpanContext().getSpanId(), context); + output.serializeString(Span.Link.TRACE_STATE, context.getData(byte[].class)); + output.serializeRepeatedMessageWithContext( + Span.Link.ATTRIBUTES, + link.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + int droppedAttributesCount = link.getTotalAttributeCount() - link.getAttributes().size(); + output.serializeUInt32(Span.Link.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + output.serializeFixed32( + Span.Link.FLAGS, + SpanFlags.withParentIsRemoteFlags( + link.getSpanContext().getTraceFlags(), link.getSpanContext().isRemote())); + } + + @Override + public int getBinarySerializedSize(LinkData link, MarshalerContext context) { + byte[] traceStateUtf8 = encodeSpanLinkTraceState(link); + context.addData(traceStateUtf8); + + int size = 0; + size += MarshalerUtil.sizeTraceId(Span.Link.TRACE_ID, link.getSpanContext().getTraceId()); + size += MarshalerUtil.sizeSpanId(Span.Link.SPAN_ID, link.getSpanContext().getSpanId()); + size += MarshalerUtil.sizeBytes(Span.Link.TRACE_STATE, traceStateUtf8); + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + Span.Link.ATTRIBUTES, + link.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + int droppedAttributesCount = link.getTotalAttributeCount() - link.getAttributes().size(); + size += MarshalerUtil.sizeUInt32(Span.Link.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + size += + MarshalerUtil.sizeFixed32( + Span.Link.FLAGS, + SpanFlags.withParentIsRemoteFlags( + link.getSpanContext().getTraceFlags(), link.getSpanContext().isRemote())); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanMarshaler.java index b487f262a3c..2e862c6cc69 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanMarshaler.java @@ -5,10 +5,10 @@ package io.opentelemetry.exporter.internal.otlp.traces; -import static io.opentelemetry.api.trace.propagation.internal.W3CTraceContextEncoding.encodeTraceState; - import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.api.trace.propagation.internal.W3CTraceContextEncoding; import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; import 
io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; import io.opentelemetry.exporter.internal.marshal.ProtoEnumInfo; @@ -37,11 +37,13 @@ final class SpanMarshaler extends MarshalerWithSize { private final SpanLinkMarshaler[] spanLinkMarshalers; private final int droppedLinksCount; private final SpanStatusMarshaler spanStatusMarshaler; + private final TraceFlags flags; + private final boolean isParentContextRemote; // Because SpanMarshaler is always part of a repeated field, it cannot return "null". static SpanMarshaler create(SpanData spanData) { KeyValueMarshaler[] attributeMarshalers = - KeyValueMarshaler.createRepeated(spanData.getAttributes()); + KeyValueMarshaler.createForAttributes(spanData.getAttributes()); SpanEventMarshaler[] spanEventMarshalers = SpanEventMarshaler.createRepeated(spanData.getEvents()); SpanLinkMarshaler[] spanLinkMarshalers = SpanLinkMarshaler.createRepeated(spanData.getLinks()); @@ -51,11 +53,7 @@ static SpanMarshaler create(SpanData spanData) { ? spanData.getParentSpanContext().getSpanId() : null; - TraceState traceState = spanData.getSpanContext().getTraceState(); - byte[] traceStateUtf8 = - traceState.isEmpty() - ? EMPTY_BYTES - : encodeTraceState(traceState).getBytes(StandardCharsets.UTF_8); + byte[] traceStateUtf8 = encodeSpanTraceState(spanData); return new SpanMarshaler( spanData.getSpanContext().getTraceId(), @@ -72,7 +70,9 @@ static SpanMarshaler create(SpanData spanData) { spanData.getTotalRecordedEvents() - spanData.getEvents().size(), spanLinkMarshalers, spanData.getTotalRecordedLinks() - spanData.getLinks().size(), - SpanStatusMarshaler.create(spanData.getStatus())); + SpanStatusMarshaler.create(spanData.getStatus()), + spanData.getSpanContext().getTraceFlags(), + spanData.getParentSpanContext().isRemote()); } private SpanMarshaler( @@ -90,7 +90,9 @@ private SpanMarshaler( int droppedEventsCount, SpanLinkMarshaler[] spanLinkMarshalers, int droppedLinksCount, - SpanStatusMarshaler spanStatusMarshaler) { + SpanStatusMarshaler spanStatusMarshaler, + TraceFlags flags, + boolean isParentContextRemote) { super( calculateSize( traceId, @@ -107,7 +109,9 @@ private SpanMarshaler( droppedEventsCount, spanLinkMarshalers, droppedLinksCount, - spanStatusMarshaler)); + spanStatusMarshaler, + flags, + isParentContextRemote)); this.traceId = traceId; this.spanId = spanId; this.traceStateUtf8 = traceStateUtf8; @@ -123,6 +127,8 @@ private SpanMarshaler( this.spanLinkMarshalers = spanLinkMarshalers; this.droppedLinksCount = droppedLinksCount; this.spanStatusMarshaler = spanStatusMarshaler; + this.flags = flags; + this.isParentContextRemote = isParentContextRemote; } @Override @@ -148,6 +154,8 @@ public void writeTo(Serializer output) throws IOException { output.serializeUInt32(Span.DROPPED_LINKS_COUNT, droppedLinksCount); output.serializeMessage(Span.STATUS, spanStatusMarshaler); + output.serializeFixed32( + Span.FLAGS, SpanFlags.withParentIsRemoteFlags(flags, isParentContextRemote)); } private static int calculateSize( @@ -165,7 +173,9 @@ private static int calculateSize( int droppedEventsCount, SpanLinkMarshaler[] spanLinkMarshalers, int droppedLinksCount, - SpanStatusMarshaler spanStatusMarshaler) { + SpanStatusMarshaler spanStatusMarshaler, + TraceFlags flags, + boolean isParentContextRemote) { int size = 0; size += MarshalerUtil.sizeTraceId(Span.TRACE_ID, traceId); size += MarshalerUtil.sizeSpanId(Span.SPAN_ID, spanId); @@ -188,6 +198,9 @@ private static int calculateSize( size += MarshalerUtil.sizeUInt32(Span.DROPPED_LINKS_COUNT, droppedLinksCount); 
size += MarshalerUtil.sizeMessage(Span.STATUS, spanStatusMarshaler); + size += + MarshalerUtil.sizeFixed32( + Span.FLAGS, SpanFlags.withParentIsRemoteFlags(flags, isParentContextRemote)); return size; } @@ -208,4 +221,15 @@ static ProtoEnumInfo toProtoSpanKind(SpanKind kind) { // NB: Should not be possible with aligned versions. return Span.SpanKind.SPAN_KIND_UNSPECIFIED; } + + static byte[] encodeSpanTraceState(SpanData span) { + TraceState traceState = span.getSpanContext().getTraceState(); + return encodeTraceState(traceState); + } + + static byte[] encodeTraceState(TraceState traceState) { + return traceState.isEmpty() + ? EMPTY_BYTES + : W3CTraceContextEncoding.encodeTraceState(traceState).getBytes(StandardCharsets.UTF_8); + } } diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanReusableDataMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanReusableDataMarshaler.java new file mode 100644 index 00000000000..b359b6cc30c --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanReusableDataMarshaler.java @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.trace.data.SpanData; +import java.util.Collection; +import java.util.Deque; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.function.BiFunction; + +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. 
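As a brief sketch of how the SpanReusableDataMarshaler defined just below might be wired into an exporter (illustration only, not part of this change): the Sender interface and wrapper class are hypothetical, and the callback's type arguments are inferred from how doExport is applied within the class.

  import io.opentelemetry.exporter.internal.marshal.Marshaler;
  import io.opentelemetry.exporter.internal.otlp.traces.SpanReusableDataMarshaler;
  import io.opentelemetry.sdk.common.CompletableResultCode;
  import io.opentelemetry.sdk.common.export.MemoryMode;
  import io.opentelemetry.sdk.trace.data.SpanData;
  import java.util.Collection;

  class ReusableExportExample {
    // Hypothetical transport: sends one marshaled request covering `count` spans.
    interface Sender {
      CompletableResultCode send(Marshaler request, int count);
    }

    private final SpanReusableDataMarshaler marshaler;

    ReusableExportExample(Sender sender) {
      // With REUSABLE_DATA, LowAllocationTraceRequestMarshaler instances are pooled and reused
      // across export calls; with IMMUTABLE_DATA, each call builds a fresh TraceRequestMarshaler.
      this.marshaler = new SpanReusableDataMarshaler(MemoryMode.REUSABLE_DATA, sender::send);
    }

    CompletableResultCode export(Collection<SpanData> spans) {
      return marshaler.export(spans);
    }
  }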
+ */ +public class SpanReusableDataMarshaler { + + private final Deque marshalerPool = + new ConcurrentLinkedDeque<>(); + + private final MemoryMode memoryMode; + private final BiFunction doExport; + + public SpanReusableDataMarshaler( + MemoryMode memoryMode, BiFunction doExport) { + this.memoryMode = memoryMode; + this.doExport = doExport; + } + + public MemoryMode getMemoryMode() { + return memoryMode; + } + + public CompletableResultCode export(Collection spans) { + if (memoryMode == MemoryMode.REUSABLE_DATA) { + LowAllocationTraceRequestMarshaler marshaler = marshalerPool.poll(); + if (marshaler == null) { + marshaler = new LowAllocationTraceRequestMarshaler(); + } + LowAllocationTraceRequestMarshaler exportMarshaler = marshaler; + exportMarshaler.initialize(spans); + return doExport + .apply(exportMarshaler, spans.size()) + .whenComplete( + () -> { + exportMarshaler.reset(); + marshalerPool.add(exportMarshaler); + }); + } + // MemoryMode == MemoryMode.IMMUTABLE_DATA + TraceRequestMarshaler request = TraceRequestMarshaler.create(spans); + return doExport.apply(request, spans.size()); + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatelessMarshaler.java new file mode 100644 index 00000000000..acfa5cae85a --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatelessMarshaler.java @@ -0,0 +1,125 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import static io.opentelemetry.exporter.internal.otlp.traces.SpanMarshaler.encodeSpanTraceState; +import static io.opentelemetry.exporter.internal.otlp.traces.SpanMarshaler.toProtoSpanKind; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValueStatelessMarshaler; +import io.opentelemetry.proto.trace.v1.internal.Span; +import io.opentelemetry.sdk.trace.data.SpanData; +import java.io.IOException; + +/** See {@link SpanMarshaler}. */ +final class SpanStatelessMarshaler implements StatelessMarshaler { + static final SpanStatelessMarshaler INSTANCE = new SpanStatelessMarshaler(); + + private SpanStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, SpanData span, MarshalerContext context) + throws IOException { + output.serializeTraceId(Span.TRACE_ID, span.getTraceId(), context); + output.serializeSpanId(Span.SPAN_ID, span.getSpanId(), context); + + byte[] traceStateUtf8 = context.getData(byte[].class); + output.serializeString(Span.TRACE_STATE, traceStateUtf8); + String parentSpanId = + span.getParentSpanContext().isValid() ? 
span.getParentSpanContext().getSpanId() : null; + output.serializeSpanId(Span.PARENT_SPAN_ID, parentSpanId, context); + + output.serializeStringWithContext(Span.NAME, span.getName(), context); + output.serializeEnum(Span.KIND, toProtoSpanKind(span.getKind())); + + output.serializeFixed64(Span.START_TIME_UNIX_NANO, span.getStartEpochNanos()); + output.serializeFixed64(Span.END_TIME_UNIX_NANO, span.getEndEpochNanos()); + + output.serializeRepeatedMessageWithContext( + Span.ATTRIBUTES, + span.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + int droppedAttributesCount = span.getTotalAttributeCount() - span.getAttributes().size(); + output.serializeUInt32(Span.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + + output.serializeRepeatedMessageWithContext( + Span.EVENTS, span.getEvents(), SpanEventStatelessMarshaler.INSTANCE, context); + int droppedEventsCount = span.getTotalRecordedEvents() - span.getEvents().size(); + output.serializeUInt32(Span.DROPPED_EVENTS_COUNT, droppedEventsCount); + + output.serializeRepeatedMessageWithContext( + Span.LINKS, span.getLinks(), SpanLinkStatelessMarshaler.INSTANCE, context); + int droppedLinksCount = span.getTotalRecordedLinks() - span.getLinks().size(); + output.serializeUInt32(Span.DROPPED_LINKS_COUNT, droppedLinksCount); + + output.serializeMessageWithContext( + Span.STATUS, span.getStatus(), SpanStatusStatelessMarshaler.INSTANCE, context); + + output.serializeFixed32( + Span.FLAGS, + SpanFlags.withParentIsRemoteFlags( + span.getSpanContext().getTraceFlags(), span.getParentSpanContext().isRemote())); + } + + @Override + public int getBinarySerializedSize(SpanData span, MarshalerContext context) { + int size = 0; + size += MarshalerUtil.sizeTraceId(Span.TRACE_ID, span.getTraceId()); + size += MarshalerUtil.sizeSpanId(Span.SPAN_ID, span.getSpanId()); + + byte[] traceStateUtf8 = encodeSpanTraceState(span); + context.addData(traceStateUtf8); + + size += MarshalerUtil.sizeBytes(Span.TRACE_STATE, traceStateUtf8); + String parentSpanId = + span.getParentSpanContext().isValid() ? 
span.getParentSpanContext().getSpanId() : null; + size += MarshalerUtil.sizeSpanId(Span.PARENT_SPAN_ID, parentSpanId); + + size += StatelessMarshalerUtil.sizeStringWithContext(Span.NAME, span.getName(), context); + size += MarshalerUtil.sizeEnum(Span.KIND, toProtoSpanKind(span.getKind())); + + size += MarshalerUtil.sizeFixed64(Span.START_TIME_UNIX_NANO, span.getStartEpochNanos()); + size += MarshalerUtil.sizeFixed64(Span.END_TIME_UNIX_NANO, span.getEndEpochNanos()); + + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + Span.ATTRIBUTES, + span.getAttributes(), + AttributeKeyValueStatelessMarshaler.INSTANCE, + context); + int droppedAttributesCount = span.getTotalAttributeCount() - span.getAttributes().size(); + size += MarshalerUtil.sizeUInt32(Span.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + Span.EVENTS, span.getEvents(), SpanEventStatelessMarshaler.INSTANCE, context); + int droppedEventsCount = span.getTotalRecordedEvents() - span.getEvents().size(); + size += MarshalerUtil.sizeUInt32(Span.DROPPED_EVENTS_COUNT, droppedEventsCount); + + size += + StatelessMarshalerUtil.sizeRepeatedMessageWithContext( + Span.LINKS, span.getLinks(), SpanLinkStatelessMarshaler.INSTANCE, context); + int droppedLinksCount = span.getTotalRecordedLinks() - span.getLinks().size(); + size += MarshalerUtil.sizeUInt32(Span.DROPPED_LINKS_COUNT, droppedLinksCount); + + size += + StatelessMarshalerUtil.sizeMessageWithContext( + Span.STATUS, span.getStatus(), SpanStatusStatelessMarshaler.INSTANCE, context); + + size += + MarshalerUtil.sizeFixed32( + Span.FLAGS, + SpanFlags.withParentIsRemoteFlags( + span.getSpanContext().getTraceFlags(), span.getParentSpanContext().isRemote())); + + return size; + } +} diff --git a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatusMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatusMarshaler.java index 0809e85c978..981bf481005 100644 --- a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatusMarshaler.java +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatusMarshaler.java @@ -19,12 +19,7 @@ final class SpanStatusMarshaler extends MarshalerWithSize { private final byte[] descriptionUtf8; static SpanStatusMarshaler create(StatusData status) { - ProtoEnumInfo protoStatusCode = Status.StatusCode.STATUS_CODE_UNSET; - if (status.getStatusCode() == StatusCode.OK) { - protoStatusCode = Status.StatusCode.STATUS_CODE_OK; - } else if (status.getStatusCode() == StatusCode.ERROR) { - protoStatusCode = Status.StatusCode.STATUS_CODE_ERROR; - } + ProtoEnumInfo protoStatusCode = toProtoSpanStatus(status); byte[] description = MarshalerUtil.toBytes(status.getDescription()); return new SpanStatusMarshaler(protoStatusCode, description); } @@ -47,4 +42,14 @@ private static int computeSize(ProtoEnumInfo protoStatusCode, byte[] description size += MarshalerUtil.sizeEnum(Status.CODE, protoStatusCode); return size; } + + static ProtoEnumInfo toProtoSpanStatus(StatusData status) { + ProtoEnumInfo protoStatusCode = Status.StatusCode.STATUS_CODE_UNSET; + if (status.getStatusCode() == StatusCode.OK) { + protoStatusCode = Status.StatusCode.STATUS_CODE_OK; + } else if (status.getStatusCode() == StatusCode.ERROR) { + protoStatusCode = Status.StatusCode.STATUS_CODE_ERROR; + } + return protoStatusCode; + } } diff --git 
a/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatusStatelessMarshaler.java b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatusStatelessMarshaler.java new file mode 100644 index 00000000000..0609c0e6829 --- /dev/null +++ b/exporters/otlp/common/src/main/java/io/opentelemetry/exporter/internal/otlp/traces/SpanStatusStatelessMarshaler.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import static io.opentelemetry.exporter.internal.otlp.traces.SpanStatusMarshaler.toProtoSpanStatus; + +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.ProtoEnumInfo; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshalerUtil; +import io.opentelemetry.proto.trace.v1.internal.Status; +import io.opentelemetry.sdk.trace.data.StatusData; +import java.io.IOException; + +/** See {@link SpanStatusMarshaler}. */ +final class SpanStatusStatelessMarshaler implements StatelessMarshaler { + static final SpanStatusStatelessMarshaler INSTANCE = new SpanStatusStatelessMarshaler(); + + private SpanStatusStatelessMarshaler() {} + + @Override + public void writeTo(Serializer output, StatusData status, MarshalerContext context) + throws IOException { + ProtoEnumInfo protoStatusCode = toProtoSpanStatus(status); + + output.serializeStringWithContext(Status.MESSAGE, status.getDescription(), context); + output.serializeEnum(Status.CODE, protoStatusCode); + } + + @Override + public int getBinarySerializedSize(StatusData status, MarshalerContext context) { + ProtoEnumInfo protoStatusCode = toProtoSpanStatus(status); + + int size = 0; + size += + StatelessMarshalerUtil.sizeStringWithContext( + Status.MESSAGE, status.getDescription(), context); + size += MarshalerUtil.sizeEnum(Status.CODE, protoStatusCode); + + return size; + } +} diff --git a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValueTest.java b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValueTest.java new file mode 100644 index 00000000000..220b3845352 --- /dev/null +++ b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/AttributeKeyValueTest.java @@ -0,0 +1,49 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.AttributeType; +import io.opentelemetry.api.common.Attributes; +import java.util.Collections; +import java.util.List; +import nl.jqno.equalsverifier.EqualsVerifier; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +class AttributeKeyValueTest { + + @Test + void equalsVerifier() { + EqualsVerifier.forClass(AttributeKeyValue.class).verify(); + } + + @Test + void ofEmpty() { + assertThat(AttributeKeyValue.of(Attributes.empty())).isEmpty(); + } + + @Test + void ofOne() { + AttributeKeyValue input = AttributeKeyValue.of(AttributeKey.stringKey("foo"), "bar"); + Attributes attributes = 
Attributes.of(input.getAttributeKey(), input.getValue()); + List> list = AttributeKeyValue.of(attributes); + Assertions.assertThat(list).hasSize(1); + assertThat(list.get(0)).isEqualTo(input); + } + + @Test + void ofList() { + AttributeKeyValue> input = + AttributeKeyValue.of(AttributeKey.longArrayKey("foo"), Collections.emptyList()); + Attributes attributes = Attributes.of(input.getAttributeKey(), input.getValue()); + List> list = AttributeKeyValue.of(attributes); + Assertions.assertThat(list).hasSize(1); + assertThat(list.get(0).getAttributeKey().getType()).isEqualTo(AttributeType.LONG_ARRAY); + } +} diff --git a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/ValueMarshalerTest.java b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/ValueMarshalerTest.java new file mode 100644 index 00000000000..d9e38900b7e --- /dev/null +++ b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/ValueMarshalerTest.java @@ -0,0 +1,199 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp; + +import static io.opentelemetry.api.common.Value.of; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.params.provider.Arguments.arguments; + +import com.google.protobuf.ByteString; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.Message; +import com.google.protobuf.util.JsonFormat; +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; +import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.common.v1.ArrayValue; +import io.opentelemetry.proto.common.v1.KeyValueList; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.stream.Stream; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +@SuppressWarnings("BadImport") +class ValueMarshalerTest { + + @ParameterizedTest + @MethodSource("serializeAnyValueArgs") + void anyValueString_StatefulMarshaler(Value value, AnyValue expectedSerializedValue) { + MarshalerWithSize marshaler = AnyValueMarshaler.create(value); + AnyValue serializedValue = parse(AnyValue.getDefaultInstance(), marshaler); + assertThat(serializedValue).isEqualTo(expectedSerializedValue); + } + + @ParameterizedTest + @MethodSource("serializeAnyValueArgs") + void anyValueString_StatelessMarshaler(Value value, AnyValue expectedSerializedValue) { + Marshaler marshaler = createMarshaler(AnyValueStatelessMarshaler.INSTANCE, value); + AnyValue serializedValue = parse(AnyValue.getDefaultInstance(), marshaler); + assertThat(serializedValue).isEqualTo(expectedSerializedValue); + } + + private static Stream serializeAnyValueArgs() { + return Stream.of( + // primitives + arguments(of("str"), AnyValue.newBuilder().setStringValue("str").build()), + arguments(of(true), AnyValue.newBuilder().setBoolValue(true).build()), + arguments(of(1), 
AnyValue.newBuilder().setIntValue(1).build()), + arguments(of(1.1), AnyValue.newBuilder().setDoubleValue(1.1).build()), + // heterogeneous array + arguments( + of(of("str"), of(true), of(1), of(1.1)), + AnyValue.newBuilder() + .setArrayValue( + ArrayValue.newBuilder() + .addValues(AnyValue.newBuilder().setStringValue("str").build()) + .addValues(AnyValue.newBuilder().setBoolValue(true).build()) + .addValues(AnyValue.newBuilder().setIntValue(1).build()) + .addValues(AnyValue.newBuilder().setDoubleValue(1.1).build()) + .build()) + .build()), + // map + arguments( + of(KeyValue.of("key1", of("val1")), KeyValue.of("key2", of(2))), + AnyValue.newBuilder() + .setKvlistValue( + KeyValueList.newBuilder() + .addValues( + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("key1") + .setValue(AnyValue.newBuilder().setStringValue("val1").build()) + .build()) + .addValues( + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("key2") + .setValue(AnyValue.newBuilder().setIntValue(2).build()) + .build()) + .build()) + .build()), + // map of maps + arguments( + of( + Collections.singletonMap( + "child", of(Collections.singletonMap("grandchild", of("str"))))), + AnyValue.newBuilder() + .setKvlistValue( + KeyValueList.newBuilder() + .addValues( + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("child") + .setValue( + AnyValue.newBuilder() + .setKvlistValue( + KeyValueList.newBuilder() + .addValues( + io.opentelemetry.proto.common.v1.KeyValue + .newBuilder() + .setKey("grandchild") + .setValue( + AnyValue.newBuilder() + .setStringValue("str") + .build()) + .build()) + .build()) + .build()) + .build()) + .build()) + .build()), + // bytes + arguments( + of("hello world".getBytes(StandardCharsets.UTF_8)), + AnyValue.newBuilder() + .setBytesValue(ByteString.copyFrom("hello world".getBytes(StandardCharsets.UTF_8))) + .build())); + } + + @SuppressWarnings("unchecked") + private static <T extends Message> T parse(T prototype, Marshaler marshaler) { + byte[] serialized = toByteArray(marshaler); + T result; + try { + result = (T) prototype.newBuilderForType().mergeFrom(serialized).build(); + } catch (InvalidProtocolBufferException e) { + throw new UncheckedIOException(e); + } + // Our marshaler should produce the exact same length of serialized output (for example, field + // default values are not outputted), so we check that here. The output itself may have slightly + // different ordering, mostly due to the way we don't output oneof values in field order all the + // time. If the lengths are equal and the resulting protos are equal, the marshaling is + // guaranteed to be valid. + assertThat(result.getSerializedSize()).isEqualTo(serialized.length); + + // We don't compare JSON strings due to some differences (particularly serializing enums as + // numbers instead of names). This may improve in the future but what matters is what we produce + // can be parsed.
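+ // In addition to the binary round trip above, round-trip the JSON form through the protobuf
+ // JSON parser and check that it decodes to the same message, so writeJsonTo gets the same
+ // coverage as writeBinaryTo.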
+ String json = toJson(marshaler); + Message.Builder builder = prototype.newBuilderForType(); + try { + JsonFormat.parser().merge(json, builder); + } catch (InvalidProtocolBufferException e) { + throw new UncheckedIOException(e); + } + + assertThat(builder.build()).isEqualTo(result); + + return result; + } + + private static byte[] toByteArray(Marshaler marshaler) { + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + try { + marshaler.writeBinaryTo(bos); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + return bos.toByteArray(); + } + + private static String toJson(Marshaler marshaler) { + + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + try { + marshaler.writeJsonTo(bos); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + return new String(bos.toByteArray(), StandardCharsets.UTF_8); + } + + private static Marshaler createMarshaler(StatelessMarshaler marshaler, T data) { + return new Marshaler() { + private final MarshalerContext context = new MarshalerContext(); + private final int size = marshaler.getBinarySerializedSize(data, context); + + @Override + public int getBinarySerializedSize() { + return size; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + context.resetReadIndex(); + marshaler.writeTo(output, data, context); + } + }; + } +} diff --git a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/logs/LogsRequestMarshalerTest.java b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/logs/LogsRequestMarshalerTest.java index 306cfe1ea14..0e54a10f0e2 100644 --- a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/logs/LogsRequestMarshalerTest.java +++ b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/logs/LogsRequestMarshalerTest.java @@ -21,6 +21,9 @@ import io.opentelemetry.api.trace.TraceId; import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; import io.opentelemetry.proto.common.v1.AnyValue; import io.opentelemetry.proto.common.v1.InstrumentationScope; import io.opentelemetry.proto.common.v1.KeyValue; @@ -28,8 +31,9 @@ import io.opentelemetry.proto.logs.v1.ResourceLogs; import io.opentelemetry.proto.logs.v1.ScopeLogs; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.LogRecordData; import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.logs.TestLogRecordData; +import io.opentelemetry.sdk.testing.logs.internal.TestExtendedLogRecordData; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.UncheckedIOException; @@ -39,6 +43,8 @@ import java.util.Locale; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; class LogsRequestMarshalerTest { private static final byte[] TRACE_ID_BYTES = @@ -46,6 +52,7 @@ class LogsRequestMarshalerTest { private static final String TRACE_ID = TraceId.fromBytes(TRACE_ID_BYTES); private static final byte[] SPAN_ID_BYTES = new byte[] {0, 0, 0, 0, 4, 3, 2, 1}; private static final String SPAN_ID = SpanId.fromBytes(SPAN_ID_BYTES); + private static final String EVENT_NAME = "hello"; 
private static final String BODY = "Hello world from this log..."; @Test @@ -53,7 +60,7 @@ void toProtoResourceLogs() { ResourceLogsMarshaler[] resourceLogsMarshalers = ResourceLogsMarshaler.create( Collections.singleton( - TestLogRecordData.builder() + TestExtendedLogRecordData.builder() .setResource( Resource.builder().put("one", 1).setSchemaUrl("http://url").build()) .setInstrumentationScopeInfo( @@ -62,6 +69,7 @@ void toProtoResourceLogs() { .setSchemaUrl("http://url") .setAttributes(Attributes.builder().put("key", "value").build()) .build()) + .setEventName(EVENT_NAME) .setBody(BODY) .setSeverity(Severity.INFO) .setSeverityText("INFO") @@ -95,17 +103,19 @@ void toProtoResourceLogs() { .build()); } - @Test - void toProtoLogRecord() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoLogRecord(MarshalerSource marshalerSource) { LogRecord logRecord = parse( LogRecord.getDefaultInstance(), - LogMarshaler.create( - TestLogRecordData.builder() + marshalerSource.create( + TestExtendedLogRecordData.builder() .setResource( Resource.create(Attributes.builder().put("testKey", "testValue").build())) .setInstrumentationScopeInfo( InstrumentationScopeInfo.builder("instrumentation").setVersion("1").build()) + .setEventName(EVENT_NAME) .setBody(BODY) .setSeverity(Severity.INFO) .setSeverityText("INFO") @@ -121,6 +131,7 @@ void toProtoLogRecord() { assertThat(logRecord.getTraceId().toByteArray()).isEqualTo(TRACE_ID_BYTES); assertThat(logRecord.getSpanId().toByteArray()).isEqualTo(SPAN_ID_BYTES); assertThat(logRecord.getSeverityText()).isEqualTo("INFO"); + assertThat(logRecord.getEventName()).isEqualTo(EVENT_NAME); assertThat(logRecord.getBody()).isEqualTo(AnyValue.newBuilder().setStringValue(BODY).build()); assertThat(logRecord.getAttributesList()) .containsExactly( @@ -133,13 +144,14 @@ void toProtoLogRecord() { assertThat(logRecord.getObservedTimeUnixNano()).isEqualTo(6789); } - @Test - void toProtoLogRecord_MinimalFields() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoLogRecord_MinimalFields(MarshalerSource marshalerSource) { LogRecord logRecord = parse( LogRecord.getDefaultInstance(), - LogMarshaler.create( - TestLogRecordData.builder() + marshalerSource.create( + TestExtendedLogRecordData.builder() .setResource( Resource.create(Attributes.builder().put("testKey", "testValue").build())) .setInstrumentationScopeInfo( @@ -153,7 +165,7 @@ void toProtoLogRecord_MinimalFields() { assertThat(logRecord.getSeverityText()).isBlank(); assertThat(logRecord.getSeverityNumber().getNumber()) .isEqualTo(Severity.UNDEFINED_SEVERITY_NUMBER.getSeverityNumber()); - assertThat(logRecord.getBody()).isEqualTo(AnyValue.newBuilder().setStringValue("").build()); + assertThat(logRecord.getBody()).isEqualTo(AnyValue.newBuilder().build()); assertThat(logRecord.getAttributesList()).isEmpty(); assertThat(logRecord.getDroppedAttributesCount()).isZero(); assertThat(logRecord.getTimeUnixNano()).isEqualTo(12345); @@ -239,4 +251,39 @@ private static String toJson(Marshaler marshaler) { } return new String(bos.toByteArray(), StandardCharsets.UTF_8); } + + private static Marshaler createMarshaler(StatelessMarshaler marshaler, T data) { + return new Marshaler() { + private final MarshalerContext context = new MarshalerContext(); + private final int size = marshaler.getBinarySerializedSize(data, context); + + @Override + public int getBinarySerializedSize() { + return size; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + context.resetReadIndex(); + 
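+ // getBinarySerializedSize above filled the context with per-element data (computed sizes and
+ // encoded strings); rewinding the read index lets writeTo replay that cached data from the
+ // start instead of recomputing it.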
marshaler.writeTo(output, data, context); + } + }; + } + + private enum MarshalerSource { + MARSHALER { + @Override + Marshaler create(LogRecordData logData) { + return LogMarshaler.create(logData); + } + }, + LOW_ALLOCATION_MARSHALER { + @Override + Marshaler create(LogRecordData logData) { + return createMarshaler(LogStatelessMarshaler.INSTANCE, logData); + } + }; + + abstract Marshaler create(LogRecordData logData); + } } diff --git a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/logs/LowAllocationLogRequestMarshalerTest.java b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/logs/LowAllocationLogRequestMarshalerTest.java new file mode 100644 index 00000000000..2d8d61e207d --- /dev/null +++ b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/logs/LowAllocationLogRequestMarshalerTest.java @@ -0,0 +1,144 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.logs; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.logs.internal.TestExtendedLogRecordData; +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.Test; + +class LowAllocationLogRequestMarshalerTest { + + private static final AttributeKey KEY_BOOL = AttributeKey.booleanKey("key_bool"); + private static final AttributeKey KEY_STRING = AttributeKey.stringKey("key_string"); + private static final AttributeKey KEY_INT = AttributeKey.longKey("key_int"); + private static final AttributeKey KEY_DOUBLE = AttributeKey.doubleKey("key_double"); + private static final AttributeKey> KEY_STRING_ARRAY = + AttributeKey.stringArrayKey("key_string_array"); + private static final AttributeKey> KEY_LONG_ARRAY = + AttributeKey.longArrayKey("key_long_array"); + private static final AttributeKey> KEY_DOUBLE_ARRAY = + AttributeKey.doubleArrayKey("key_double_array"); + private static final AttributeKey> KEY_BOOLEAN_ARRAY = + AttributeKey.booleanArrayKey("key_boolean_array"); + private static final String EVENT_NAME = "hello"; + private static final String BODY = "Hello world from this log..."; + + private static final Resource RESOURCE = + Resource.create( + Attributes.builder() + .put(KEY_BOOL, true) + .put(KEY_STRING, "string") + .put(KEY_INT, 100L) + .put(KEY_DOUBLE, 100.3) + .put(KEY_STRING_ARRAY, Arrays.asList("string", "string")) + .put(KEY_LONG_ARRAY, Arrays.asList(12L, 23L)) + .put(KEY_DOUBLE_ARRAY, Arrays.asList(12.3, 23.1)) + .put(KEY_BOOLEAN_ARRAY, Arrays.asList(true, false)) + .build()); + + private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO = + InstrumentationScopeInfo.create("name"); + private static final String TRACE_ID = "7b2e170db4df2d593ddb4ddf2ddf2d59"; + private static final String SPAN_ID = "170d3ddb4d23e81f"; + private static final SpanContext SPAN_CONTEXT = + 
SpanContext.create(TRACE_ID, SPAN_ID, TraceFlags.getSampled(), TraceState.getDefault()); + + private final List logRecordDataList = createLogRecordDataList(); + + private static List createLogRecordDataList() { + List logRecordDataList = new ArrayList<>(); + for (int i = 0; i < 5; i++) { + logRecordDataList.add(createLogRecordData()); + } + return logRecordDataList; + } + + private static LogRecordData createLogRecordData() { + return TestExtendedLogRecordData.builder() + .setResource(RESOURCE) + .setInstrumentationScopeInfo(INSTRUMENTATION_SCOPE_INFO) + .setEventName(EVENT_NAME) + .setBody(BODY) + .setSeverity(Severity.INFO) + .setSeverityText("INFO") + .setSpanContext(SPAN_CONTEXT) + .setAttributes( + Attributes.builder() + .put(KEY_BOOL, true) + .put(KEY_STRING, "string") + .put(KEY_INT, 100L) + .put(KEY_DOUBLE, 100.3) + .build()) + .setTotalAttributeCount(2) + .setTimestamp(12345, TimeUnit.NANOSECONDS) + .setObservedTimestamp(6789, TimeUnit.NANOSECONDS) + .build(); + } + + @Test + void validateOutput() throws Exception { + byte[] result; + { + LogsRequestMarshaler requestMarshaler = LogsRequestMarshaler.create(logRecordDataList); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeBinaryTo(customOutput); + result = customOutput.toByteArray(); + } + + byte[] lowAllocationResult; + { + LowAllocationLogsRequestMarshaler requestMarshaler = new LowAllocationLogsRequestMarshaler(); + requestMarshaler.initialize(logRecordDataList); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeBinaryTo(customOutput); + lowAllocationResult = customOutput.toByteArray(); + } + + assertThat(lowAllocationResult).isEqualTo(result); + } + + @Test + void validateJsonOutput() throws Exception { + String result; + { + LogsRequestMarshaler requestMarshaler = LogsRequestMarshaler.create(logRecordDataList); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeJsonTo(customOutput); + result = new String(customOutput.toByteArray(), StandardCharsets.UTF_8); + } + + String lowAllocationResult; + { + LowAllocationLogsRequestMarshaler requestMarshaler = new LowAllocationLogsRequestMarshaler(); + requestMarshaler.initialize(logRecordDataList); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeJsonTo(customOutput); + lowAllocationResult = new String(customOutput.toByteArray(), StandardCharsets.UTF_8); + } + + assertThat(lowAllocationResult).isEqualTo(result); + } +} diff --git a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/metrics/LowAllocationMetricsRequestMarshalerTest.java b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/metrics/LowAllocationMetricsRequestMarshalerTest.java new file mode 100644 index 00000000000..3efc938993f --- /dev/null +++ b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/metrics/LowAllocationMetricsRequestMarshalerTest.java @@ -0,0 +1,408 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.metrics; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Named.named; +import static org.junit.jupiter.params.provider.Arguments.arguments; + +import 
io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleCounter; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.DoubleUpDownCounter; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentSelector; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.View; +import io.opentelemetry.sdk.metrics.data.ExemplarData; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.SummaryData; +import io.opentelemetry.sdk.metrics.data.SummaryPointData; +import io.opentelemetry.sdk.metrics.data.ValueAtQuantile; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.function.Consumer; +import java.util.stream.Stream; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; + +class LowAllocationMetricsRequestMarshalerTest { + + @ParameterizedTest + @ArgumentsSource(MetricsProvider.class) + void validateOutput(Collection metrics) throws Exception { + byte[] result; + { + MetricsRequestMarshaler requestMarshaler = MetricsRequestMarshaler.create(metrics); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeBinaryTo(customOutput); + result = customOutput.toByteArray(); + } + + byte[] lowAllocationResult; + { + LowAllocationMetricsRequestMarshaler requestMarshaler = + new LowAllocationMetricsRequestMarshaler(); + requestMarshaler.initialize(metrics); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeBinaryTo(customOutput); + lowAllocationResult = customOutput.toByteArray(); + } + + assertThat(lowAllocationResult).isEqualTo(result); + } + + @ParameterizedTest + @ArgumentsSource(MetricsProvider.class) + void validateJsonOutput(Collection metrics) throws Exception { + String result; + { + 
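+ // Serialize with the stateful MetricsRequestMarshaler first; its JSON output is the reference
+ // that the low-allocation marshaler below must match exactly.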
MetricsRequestMarshaler requestMarshaler = MetricsRequestMarshaler.create(metrics); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeJsonTo(customOutput); + result = new String(customOutput.toByteArray(), StandardCharsets.UTF_8); + } + + String lowAllocationResult; + { + LowAllocationMetricsRequestMarshaler requestMarshaler = + new LowAllocationMetricsRequestMarshaler(); + requestMarshaler.initialize(metrics); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeJsonTo(customOutput); + lowAllocationResult = new String(customOutput.toByteArray(), StandardCharsets.UTF_8); + } + + assertThat(lowAllocationResult).isEqualTo(result); + } + + @ParameterizedTest + @ArgumentsSource(ExemplarProvider.class) + void validateExemplar(ExemplarData exemplar) throws Exception { + byte[] result; + { + Marshaler marshaler = ExemplarMarshaler.create(exemplar); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(marshaler.getBinarySerializedSize()); + marshaler.writeBinaryTo(customOutput); + result = customOutput.toByteArray(); + } + + byte[] lowAllocationResult; + { + MarshalerContext context = new MarshalerContext(); + class TestMarshaler extends MarshalerWithSize { + + protected TestMarshaler() { + super(ExemplarStatelessMarshaler.INSTANCE.getBinarySerializedSize(exemplar, context)); + } + + @Override + protected void writeTo(Serializer output) throws IOException { + ExemplarStatelessMarshaler.INSTANCE.writeTo(output, exemplar, context); + } + } + Marshaler marshaler = new TestMarshaler(); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(marshaler.getBinarySerializedSize()); + marshaler.writeBinaryTo(customOutput); + lowAllocationResult = customOutput.toByteArray(); + } + + assertThat(lowAllocationResult).isEqualTo(result); + } + + @Test + void validateSummary() throws Exception { + List percentileValues = + Arrays.asList(ImmutableValueAtQuantile.create(3.0, 4.0)); + List points = + Arrays.asList( + ImmutableSummaryPointData.create( + 12345, 12346, Attributes.empty(), 1, 2.0, percentileValues)); + SummaryData summary = ImmutableSummaryData.create(points); + + byte[] result; + { + Marshaler marshaler = SummaryMarshaler.create(summary); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(marshaler.getBinarySerializedSize()); + marshaler.writeBinaryTo(customOutput); + result = customOutput.toByteArray(); + } + + byte[] lowAllocationResult; + { + MarshalerContext context = new MarshalerContext(); + class TestMarshaler extends MarshalerWithSize { + + protected TestMarshaler() { + super(SummaryStatelessMarshaler.INSTANCE.getBinarySerializedSize(summary, context)); + } + + @Override + protected void writeTo(Serializer output) throws IOException { + SummaryStatelessMarshaler.INSTANCE.writeTo(output, summary, context); + } + } + Marshaler marshaler = new TestMarshaler(); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(marshaler.getBinarySerializedSize()); + marshaler.writeBinaryTo(customOutput); + lowAllocationResult = customOutput.toByteArray(); + } + + assertThat(lowAllocationResult).isEqualTo(result); + } + + private static Collection metrics(Consumer metricProducer) { + InMemoryMetricReader metricReader = InMemoryMetricReader.create(); + SdkMeterProvider meterProvider = + SdkMeterProvider.builder() + .registerMetricReader(metricReader) + .registerView( + 
InstrumentSelector.builder().setName("exponentialhistogram").build(), + View.builder() + .setAggregation(Aggregation.base2ExponentialBucketHistogram()) + .build()) + .setResource( + Resource.create( + Attributes.builder() + .put(AttributeKey.booleanKey("key_bool"), true) + .put(AttributeKey.stringKey("key_string"), "string") + .put(AttributeKey.longKey("key_int"), 100L) + .put(AttributeKey.doubleKey("key_double"), 100.3) + .put( + AttributeKey.stringArrayKey("key_string_array"), + Arrays.asList("string", "string")) + .put(AttributeKey.longArrayKey("key_long_array"), Arrays.asList(12L, 23L)) + .put( + AttributeKey.doubleArrayKey("key_double_array"), + Arrays.asList(12.3, 23.1)) + .put( + AttributeKey.booleanArrayKey("key_boolean_array"), + Arrays.asList(true, false)) + .build())) + .build(); + metricProducer.accept(meterProvider); + + return metricReader.collectAllMetrics(); + } + + private static class MetricsProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) { + return Stream.of( + arguments( + named( + "long gauge", + metrics( + meterProvider -> + meterProvider + .get("long gauge") + .gaugeBuilder("gauge") + .setDescription("gauge description") + .setUnit("unit") + .ofLongs() + .buildWithCallback( + measurement -> + measurement.record( + 5, + Attributes.of( + AttributeKey.stringKey("key"), "value")))))), + arguments( + named( + "long counter", + metrics( + meterProvider -> { + LongCounter longCounter = + meterProvider + .get("long counter") + .counterBuilder("counter") + .setDescription("counter description") + .setUnit("unit") + .build(); + longCounter.add(1); + longCounter.add(2, Attributes.of(AttributeKey.longKey("lives"), 9L)); + longCounter.add(3); + }))), + arguments( + named( + "long updowncounter", + metrics( + meterProvider -> { + LongUpDownCounter longUpDownCounter = + meterProvider + .get("long updowncounter") + .upDownCounterBuilder("updowncounter") + .setDescription("updowncounter description") + .setUnit("unit") + .build(); + longUpDownCounter.add(1); + longUpDownCounter.add( + -1, Attributes.of(AttributeKey.booleanKey("on"), true)); + longUpDownCounter.add(1); + }))), + arguments( + named( + "double gauge", + metrics( + meterProvider -> + meterProvider + .get("double gauge") + .gaugeBuilder("doublegauge") + .setDescription("doublegauge") + .setUnit("unit") + .buildWithCallback(measurement -> measurement.record(5.0))))), + arguments( + named( + "double counter", + metrics( + meterProvider -> { + DoubleCounter doubleCounter = + meterProvider + .get("double counter") + .counterBuilder("doublecounter") + .ofDoubles() + .build(); + doubleCounter.add(1.0); + doubleCounter.add(2.0); + }))), + arguments( + named( + "double updowncounter", + metrics( + meterProvider -> { + DoubleUpDownCounter doubleUpDownCounter = + meterProvider + .get("double updowncounter") + .upDownCounterBuilder("doubleupdown") + .ofDoubles() + .build(); + doubleUpDownCounter.add(1.0); + doubleUpDownCounter.add(-1.0); + }))), + arguments( + named( + "double histogram", + metrics( + meterProvider -> { + DoubleHistogram histogram = + meterProvider + .get("double histogram") + .histogramBuilder("histogram") + .build(); + histogram.record(1.0); + histogram.record(2.0); + histogram.record(3.0); + histogram.record(4.0); + histogram.record(5.0); + }))), + arguments( + named( + "long histogram", + metrics( + meterProvider -> { + LongHistogram histogram = + meterProvider + .get("long histogram") + .histogramBuilder("histogram") + .ofLongs() + .build(); + 
histogram.record(1); + histogram.record(2); + histogram.record(3); + histogram.record(4); + histogram.record(5); + }))), + arguments( + named( + "double exponential histogram", + metrics( + meterProvider -> { + DoubleHistogram histogram = + meterProvider + .get("double exponential histogram") + .histogramBuilder("exponentialhistogram") + .build(); + histogram.record(1.0); + histogram.record(2.0); + histogram.record(3.0); + histogram.record(4.0); + histogram.record(5.0); + }))), + arguments( + named( + "long exponential histogram", + metrics( + meterProvider -> { + DoubleHistogram histogram = + meterProvider + .get("long exponential histogram") + .histogramBuilder("exponentialhistogram") + .build(); + histogram.record(1); + histogram.record(2); + histogram.record(3); + histogram.record(4); + histogram.record(5); + })))); + } + } + + private static class ExemplarProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) { + SpanContext spanContext = + SpanContext.create( + "7b2e170db4df2d593ddb4ddf2ddf2d59", + "170d3ddb4d23e81f", + TraceFlags.getSampled(), + TraceState.getDefault()); + + return Stream.of( + arguments( + named( + "double exemplar", + ImmutableDoubleExemplarData.create(Attributes.empty(), 12345, spanContext, 5.0))), + arguments( + named( + "long exemplar", + ImmutableLongExemplarData.create(Attributes.empty(), 12345, spanContext, 5)))); + } + } +} diff --git a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricsRequestMarshalerTest.java b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricsRequestMarshalerTest.java index fc92bb29cc9..e2f6563b942 100644 --- a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricsRequestMarshalerTest.java +++ b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/metrics/MetricsRequestMarshalerTest.java @@ -22,6 +22,9 @@ import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; import io.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest; import io.opentelemetry.proto.common.v1.AnyValue; import io.opentelemetry.proto.common.v1.InstrumentationScope; @@ -40,6 +43,7 @@ import io.opentelemetry.proto.metrics.v1.Summary; import io.opentelemetry.proto.metrics.v1.SummaryDataPoint; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData; import io.opentelemetry.sdk.metrics.data.HistogramPointData; @@ -61,6 +65,8 @@ import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile; +import io.opentelemetry.sdk.metrics.internal.data.MutableExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.internal.data.MutableExponentialHistogramPointData; import io.opentelemetry.sdk.resources.Resource; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -72,7 +78,8 @@ import 
java.util.List; import java.util.Locale; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; // Fill deprecated APIs before removing them after users get a chance to migrate. class MetricsRequestMarshalerTest { @@ -83,10 +90,12 @@ private static AnyValue stringValue(String v) { return AnyValue.newBuilder().setStringValue(v).build(); } - @Test - void dataPoint_withDefaultValues() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void dataPoint_withDefaultValues(MarshalerSource marshalerSource) { assertThat( toNumberDataPoints( + marshalerSource, singletonList( ImmutableLongPointData.create( 123, @@ -129,6 +138,7 @@ void dataPoint_withDefaultValues() { assertThat( toNumberDataPoints( + marshalerSource, singletonList( ImmutableDoublePointData.create( 123, @@ -170,11 +180,13 @@ void dataPoint_withDefaultValues() { .build()); } - @Test - void longDataPoints() { - assertThat(toNumberDataPoints(Collections.emptyList())).isEmpty(); + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void longDataPoints(MarshalerSource marshalerSource) { + assertThat(toNumberDataPoints(marshalerSource, Collections.emptyList())).isEmpty(); assertThat( toNumberDataPoints( + marshalerSource, singletonList( ImmutableLongPointData.create( 123, @@ -216,6 +228,7 @@ void longDataPoints() { .build()); assertThat( toNumberDataPoints( + marshalerSource, ImmutableList.of( ImmutableLongPointData.create(123, 456, Attributes.empty(), 5), ImmutableLongPointData.create(321, 654, KV_ATTR, 7)))) @@ -235,11 +248,13 @@ void longDataPoints() { .build()); } - @Test - void doubleDataPoints() { - assertThat(toNumberDataPoints(Collections.emptyList())).isEmpty(); + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void doubleDataPoints(MarshalerSource marshalerSource) { + assertThat(toNumberDataPoints(marshalerSource, Collections.emptyList())).isEmpty(); assertThat( toNumberDataPoints( + marshalerSource, singletonList(ImmutableDoublePointData.create(123, 456, KV_ATTR, 5.1)))) .containsExactly( NumberDataPoint.newBuilder() @@ -252,6 +267,7 @@ void doubleDataPoints() { .build()); assertThat( toNumberDataPoints( + marshalerSource, ImmutableList.of( ImmutableDoublePointData.create(123, 456, Attributes.empty(), 5.1), ImmutableDoublePointData.create(321, 654, KV_ATTR, 7.1)))) @@ -271,10 +287,12 @@ void doubleDataPoints() { .build()); } - @Test - void summaryDataPoints() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void summaryDataPoints(MarshalerSource marshalerSource) { assertThat( toSummaryDataPoints( + marshalerSource, singletonList( ImmutableSummaryPointData.create( 123, @@ -300,6 +318,7 @@ void summaryDataPoints() { .build()); assertThat( toSummaryDataPoints( + marshalerSource, ImmutableList.of( ImmutableSummaryPointData.create( 123, 456, Attributes.empty(), 7, 15.3, Collections.emptyList()), @@ -340,10 +359,12 @@ void summaryDataPoints() { .build()); } - @Test - void histogramDataPoints() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void histogramDataPoints(MarshalerSource marshalerSource) { assertThat( toHistogramDataPoints( + marshalerSource, ImmutableList.of( ImmutableHistogramPointData.create( 123, @@ -415,10 +436,12 @@ void histogramDataPoints() { .build()); } - @Test - void exponentialHistogramDataPoints() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void exponentialHistogramDataPoints(MarshalerSource marshalerSource) { assertThat( 
toExponentialHistogramDataPoints( + marshalerSource, ImmutableList.of( ImmutableExponentialHistogramPointData.create( 0, @@ -508,10 +531,113 @@ void exponentialHistogramDataPoints() { .build()); } - @Test - void toProtoMetric_monotonic() { + @SuppressWarnings("PointlessArithmeticExpression") + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void exponentialHistogramReusableDataPoints(MarshalerSource marshalerSource) { + assertThat( + toExponentialHistogramDataPoints( + marshalerSource, + ImmutableList.of( + new MutableExponentialHistogramPointData() + .set( + 0, + 123.4, + 1, + /* hasMin= */ false, + 0, + /* hasMax= */ false, + 0, + new MutableExponentialHistogramBuckets() + .set(0, 0, 0, DynamicPrimitiveLongList.empty()), + new MutableExponentialHistogramBuckets() + .set(0, 0, 0, DynamicPrimitiveLongList.empty()), + 123, + 456, + Attributes.empty(), + Collections.emptyList()), + new MutableExponentialHistogramPointData() + .set( + 0, + 123.4, + 1, + /* hasMin= */ true, + 3.3, + /* hasMax= */ true, + 80.1, + new MutableExponentialHistogramBuckets() + .set(0, 1, 1 + 0 + 2, DynamicPrimitiveLongList.of(1L, 0L, 2L)), + new MutableExponentialHistogramBuckets() + .set(0, 0, 0, DynamicPrimitiveLongList.empty()), + 123, + 456, + Attributes.of(stringKey("key"), "value"), + ImmutableList.of( + ImmutableDoubleExemplarData.create( + Attributes.of(stringKey("test"), "value"), + 2, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 1.5)))))) + .containsExactly( + ExponentialHistogramDataPoint.newBuilder() + .setStartTimeUnixNano(123) + .setTimeUnixNano(456) + .setCount(1) + .setScale(0) + .setSum(123.4) + .setZeroCount(1) + .setPositive( + ExponentialHistogramDataPoint.Buckets.newBuilder().setOffset(0)) // no buckets + .setNegative( + ExponentialHistogramDataPoint.Buckets.newBuilder().setOffset(0)) // no buckets + .build(), + ExponentialHistogramDataPoint.newBuilder() + .setStartTimeUnixNano(123) + .setTimeUnixNano(456) + .setCount(4) // Counts in positive, negative, and zero count. 
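+ // count 4 = zero count (1) + positive bucket counts (1 + 0 + 2); the negative buckets are empty.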
+ .addAllAttributes( + singletonList( + KeyValue.newBuilder().setKey("key").setValue(stringValue("value")).build())) + .setScale(0) + .setSum(123.4) + .setMin(3.3) + .setMax(80.1) + .setZeroCount(1) + .setPositive( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .setOffset(1) + .addBucketCounts(1) + .addBucketCounts(0) + .addBucketCounts(2)) + .setNegative( + ExponentialHistogramDataPoint.Buckets.newBuilder().setOffset(0)) // no buckets + .addExemplars( + Exemplar.newBuilder() + .setTimeUnixNano(2) + .addFilteredAttributes( + KeyValue.newBuilder() + .setKey("test") + .setValue(stringValue("value")) + .build()) + .setSpanId(ByteString.copyFrom(new byte[] {0, 0, 0, 0, 0, 0, 0, 2})) + .setTraceId( + ByteString.copyFrom( + new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1})) + .setAsDouble(1.5) + .build()) + .build()); + } + + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoMetric_monotonic(MarshalerSource marshalerSource) { assertThat( toProtoMetric( + marshalerSource, ImmutableMetricData.createLongSum( Resource.empty(), InstrumentationScopeInfo.empty(), @@ -547,6 +673,7 @@ void toProtoMetric_monotonic() { .build()); assertThat( toProtoMetric( + marshalerSource, ImmutableMetricData.createDoubleSum( Resource.empty(), InstrumentationScopeInfo.empty(), @@ -582,10 +709,12 @@ void toProtoMetric_monotonic() { .build()); } - @Test - void toProtoMetric_nonMonotonic() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoMetric_nonMonotonic(MarshalerSource marshalerSource) { assertThat( toProtoMetric( + marshalerSource, ImmutableMetricData.createLongSum( Resource.empty(), InstrumentationScopeInfo.empty(), @@ -621,6 +750,7 @@ void toProtoMetric_nonMonotonic() { .build()); assertThat( toProtoMetric( + marshalerSource, ImmutableMetricData.createDoubleSum( Resource.empty(), InstrumentationScopeInfo.empty(), @@ -656,10 +786,12 @@ void toProtoMetric_nonMonotonic() { .build()); } - @Test - void toProtoMetric_gauges() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoMetric_gauges(MarshalerSource marshalerSource) { assertThat( toProtoMetric( + marshalerSource, ImmutableMetricData.createLongGauge( Resource.empty(), InstrumentationScopeInfo.empty(), @@ -691,6 +823,7 @@ void toProtoMetric_gauges() { .build()); assertThat( toProtoMetric( + marshalerSource, ImmutableMetricData.createDoubleGauge( Resource.empty(), InstrumentationScopeInfo.empty(), @@ -722,10 +855,12 @@ void toProtoMetric_gauges() { .build()); } - @Test - void toProtoMetric_summary() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoMetric_summary(MarshalerSource marshalerSource) { assertThat( toProtoMetric( + marshalerSource, ImmutableMetricData.createDoubleSummary( Resource.empty(), InstrumentationScopeInfo.empty(), @@ -777,10 +912,12 @@ void toProtoMetric_summary() { .build()); } - @Test - void toProtoMetric_histogram() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoMetric_histogram(MarshalerSource marshalerSource) { assertThat( toProtoMetric( + marshalerSource, ImmutableMetricData.createDoubleHistogram( Resource.empty(), InstrumentationScopeInfo.empty(), @@ -829,10 +966,12 @@ void toProtoMetric_histogram() { .build()); } - @Test - void toProtoMetric_exponentialHistogram() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoMetric_exponentialHistogram(MarshalerSource marshalerSource) { assertThat( toProtoMetric( + marshalerSource, ImmutableMetricData.createExponentialHistogram( 
Resource.empty(), InstrumentationScopeInfo.empty(), @@ -899,8 +1038,9 @@ void toProtoMetric_exponentialHistogram() { .build()); } - @Test - void protoResourceMetrics() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void protoResourceMetrics(MarshalerSource marshalerSource) { Resource resource = Resource.create(Attributes.of(stringKey("ka"), "va"), "http://resource.url"); io.opentelemetry.proto.resource.v1.Resource resourceProto = @@ -955,6 +1095,7 @@ void protoResourceMetrics() { assertThat( toProtoResourceMetrics( + marshalerSource, ImmutableList.of( ImmutableMetricData.createDoubleSum( resource, @@ -1028,55 +1169,48 @@ void protoResourceMetrics() { }); } - private static List toNumberDataPoints(Collection points) { + private static List toNumberDataPoints( + MarshalerSource marshalerSource, Collection points) { return points.stream() - .map( - point -> - parse(NumberDataPoint.getDefaultInstance(), NumberDataPointMarshaler.create(point))) + .map(point -> parse(NumberDataPoint.getDefaultInstance(), marshalerSource.create(point))) .collect(Collectors.toList()); } - private static List toSummaryDataPoints(Collection points) { + private static List toSummaryDataPoints( + MarshalerSource marshalerSource, Collection points) { return points.stream() - .map( - point -> - parse( - SummaryDataPoint.getDefaultInstance(), SummaryDataPointMarshaler.create(point))) + .map(point -> parse(SummaryDataPoint.getDefaultInstance(), marshalerSource.create(point))) .collect(Collectors.toList()); } private static List toHistogramDataPoints( - Collection points) { + MarshalerSource marshalerSource, Collection points) { return points.stream() - .map( - point -> - parse( - HistogramDataPoint.getDefaultInstance(), - HistogramDataPointMarshaler.create(point))) + .map(point -> parse(HistogramDataPoint.getDefaultInstance(), marshalerSource.create(point))) .collect(Collectors.toList()); } private static List toExponentialHistogramDataPoints( - Collection points) { + MarshalerSource marshalerSource, Collection points) { return points.stream() .map( point -> parse( ExponentialHistogramDataPoint.getDefaultInstance(), - ExponentialHistogramDataPointMarshaler.create(point))) + marshalerSource.create(point))) .collect(Collectors.toList()); } - private static Metric toProtoMetric(MetricData metricData) { - return parse(Metric.getDefaultInstance(), MetricMarshaler.create(metricData)); + private static Metric toProtoMetric(MarshalerSource marshalerSource, MetricData metricData) { + return parse(Metric.getDefaultInstance(), marshalerSource.create(metricData)); } private static List toProtoResourceMetrics( - Collection metricDataList) { + MarshalerSource marshalerSource, Collection metricDataList) { ExportMetricsServiceRequest exportRequest = parse( ExportMetricsServiceRequest.getDefaultInstance(), - MetricsRequestMarshaler.create(metricDataList)); + marshalerSource.create(metricDataList)); return exportRequest.getResourceMetricsList(); } @@ -1162,4 +1296,101 @@ private static String toJson(Marshaler marshaler) { } return new String(bos.toByteArray(), StandardCharsets.UTF_8); } + + private static Marshaler createMarshaler(StatelessMarshaler marshaler, T data) { + return new Marshaler() { + private final MarshalerContext context = new MarshalerContext(); + private final int size = marshaler.getBinarySerializedSize(data, context); + + @Override + public int getBinarySerializedSize() { + return size; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + context.resetReadIndex(); + 
marshaler.writeTo(output, data, context); + } + }; + } + + private enum MarshalerSource { + STATEFUL_MARSHALER { + @Override + Marshaler create(PointData point) { + return NumberDataPointMarshaler.create(point); + } + + @Override + Marshaler create(SummaryPointData point) { + return SummaryDataPointMarshaler.create(point); + } + + @Override + Marshaler create(HistogramPointData point) { + return HistogramDataPointMarshaler.create(point); + } + + @Override + Marshaler create(ExponentialHistogramPointData point) { + return ExponentialHistogramDataPointMarshaler.create(point); + } + + @Override + Marshaler create(MetricData metric) { + return MetricMarshaler.create(metric); + } + + @Override + Marshaler create(Collection metricDataList) { + return MetricsRequestMarshaler.create(metricDataList); + } + }, + STATELESS_MARSHALER { + @Override + Marshaler create(PointData point) { + return createMarshaler(NumberDataPointStatelessMarshaler.INSTANCE, point); + } + + @Override + Marshaler create(SummaryPointData point) { + return createMarshaler(SummaryDataPointStatelessMarshaler.INSTANCE, point); + } + + @Override + Marshaler create(HistogramPointData point) { + return createMarshaler(HistogramDataPointStatelessMarshaler.INSTANCE, point); + } + + @Override + Marshaler create(ExponentialHistogramPointData point) { + return createMarshaler(ExponentialHistogramDataPointStatelessMarshaler.INSTANCE, point); + } + + @Override + Marshaler create(MetricData metric) { + return createMarshaler(MetricStatelessMarshaler.INSTANCE, metric); + } + + @Override + Marshaler create(Collection metricDataList) { + LowAllocationMetricsRequestMarshaler marshaler = new LowAllocationMetricsRequestMarshaler(); + marshaler.initialize(metricDataList); + return marshaler; + } + }; + + abstract Marshaler create(PointData point); + + abstract Marshaler create(SummaryPointData point); + + abstract Marshaler create(HistogramPointData point); + + abstract Marshaler create(ExponentialHistogramPointData point); + + abstract Marshaler create(MetricData metric); + + abstract Marshaler create(Collection metricDataList); + } } diff --git a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/LowAllocationTraceRequestMarshalerTest.java b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/LowAllocationTraceRequestMarshalerTest.java new file mode 100644 index 00000000000..e868373d0ca --- /dev/null +++ b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/LowAllocationTraceRequestMarshalerTest.java @@ -0,0 +1,159 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.trace.TestSpanData; +import io.opentelemetry.sdk.trace.data.EventData; +import io.opentelemetry.sdk.trace.data.LinkData; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.data.StatusData; +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; 
+import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.Test; + +class LowAllocationTraceRequestMarshalerTest { + + private static final AttributeKey KEY_BOOL = AttributeKey.booleanKey("key_bool"); + private static final AttributeKey KEY_STRING = AttributeKey.stringKey("key_string"); + private static final AttributeKey KEY_INT = AttributeKey.longKey("key_int"); + private static final AttributeKey KEY_DOUBLE = AttributeKey.doubleKey("key_double"); + private static final AttributeKey> KEY_STRING_ARRAY = + AttributeKey.stringArrayKey("key_string_array"); + private static final AttributeKey> KEY_LONG_ARRAY = + AttributeKey.longArrayKey("key_long_array"); + private static final AttributeKey> KEY_DOUBLE_ARRAY = + AttributeKey.doubleArrayKey("key_double_array"); + private static final AttributeKey> KEY_BOOLEAN_ARRAY = + AttributeKey.booleanArrayKey("key_boolean_array"); + private static final AttributeKey LINK_ATTR_KEY = AttributeKey.stringKey("link_attr_key"); + + private static final Resource RESOURCE = + Resource.create( + Attributes.builder() + .put(KEY_BOOL, true) + .put(KEY_STRING, "string") + .put(KEY_INT, 100L) + .put(KEY_DOUBLE, 100.3) + .put(KEY_STRING_ARRAY, Arrays.asList("string", "string")) + .put(KEY_LONG_ARRAY, Arrays.asList(12L, 23L)) + .put(KEY_DOUBLE_ARRAY, Arrays.asList(12.3, 23.1)) + .put(KEY_BOOLEAN_ARRAY, Arrays.asList(true, false)) + .build()); + + private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO = + InstrumentationScopeInfo.create("name"); + private static final String TRACE_ID = "7b2e170db4df2d593ddb4ddf2ddf2d59"; + private static final String SPAN_ID = "170d3ddb4d23e81f"; + private static final SpanContext SPAN_CONTEXT = + SpanContext.create(TRACE_ID, SPAN_ID, TraceFlags.getSampled(), TraceState.getDefault()); + + private final List spanDataList = createSpanDataList(); + + private static List createSpanDataList() { + List spanDataList = new ArrayList<>(); + for (int i = 0; i < 5; i++) { + spanDataList.add(createSpanData()); + } + return spanDataList; + } + + private static SpanData createSpanData() { + return TestSpanData.builder() + .setResource(RESOURCE) + .setInstrumentationScopeInfo(INSTRUMENTATION_SCOPE_INFO) + .setHasEnded(true) + .setSpanContext(SPAN_CONTEXT) + .setParentSpanContext(SpanContext.getInvalid()) + .setName("GET /api/endpoint") + .setKind(SpanKind.SERVER) + .setStartEpochNanos(12345) + .setEndEpochNanos(12349) + .setAttributes( + Attributes.builder() + .put(KEY_BOOL, true) + .put(KEY_STRING, "string") + .put(KEY_INT, 100L) + .put(KEY_DOUBLE, 100.3) + .build()) + .setTotalAttributeCount(2) + .setEvents( + Arrays.asList( + EventData.create(12347, "my_event_1", Attributes.empty()), + EventData.create(12348, "my_event_2", Attributes.of(KEY_INT, 1234L)), + EventData.create(12349, "my_event_3", Attributes.empty()))) + .setTotalRecordedEvents(4) + .setLinks( + Arrays.asList( + LinkData.create(SPAN_CONTEXT), + LinkData.create(SPAN_CONTEXT, Attributes.of(LINK_ATTR_KEY, "value")))) + .setTotalRecordedLinks(3) + .setStatus(StatusData.ok()) + .build(); + } + + @Test + void validateOutput() throws Exception { + byte[] result; + { + TraceRequestMarshaler requestMarshaler = TraceRequestMarshaler.create(spanDataList); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeBinaryTo(customOutput); + result = customOutput.toByteArray(); + } + + byte[] lowAllocationResult; + { + LowAllocationTraceRequestMarshaler 
requestMarshaler = + new LowAllocationTraceRequestMarshaler(); + requestMarshaler.initialize(spanDataList); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeBinaryTo(customOutput); + lowAllocationResult = customOutput.toByteArray(); + } + + assertThat(lowAllocationResult).isEqualTo(result); + } + + @Test + void validateJsonOutput() throws Exception { + String result; + { + TraceRequestMarshaler requestMarshaler = TraceRequestMarshaler.create(spanDataList); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeJsonTo(customOutput); + result = new String(customOutput.toByteArray(), StandardCharsets.UTF_8); + } + + String lowAllocationResult; + { + LowAllocationTraceRequestMarshaler requestMarshaler = + new LowAllocationTraceRequestMarshaler(); + requestMarshaler.initialize(spanDataList); + ByteArrayOutputStream customOutput = + new ByteArrayOutputStream(requestMarshaler.getBinarySerializedSize()); + requestMarshaler.writeJsonTo(customOutput); + lowAllocationResult = new String(customOutput.toByteArray(), StandardCharsets.UTF_8); + } + + assertThat(lowAllocationResult).isEqualTo(result); + } +} diff --git a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/SpanFlagsTest.java b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/SpanFlagsTest.java new file mode 100644 index 00000000000..481ffc2d23c --- /dev/null +++ b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/SpanFlagsTest.java @@ -0,0 +1,121 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.internal.otlp.traces; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.trace.TraceFlags; +import org.junit.jupiter.api.Test; + +/** Unit tests for {@link SpanFlags}. 
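+ *
+ * <p>Expected bit layout exercised below: bits 0-7 carry the W3C trace flags, bit 8 (0x100)
+ * records that it is known whether the parent is remote, and bit 9 (0x200) records that the
+ * parent is remote.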
*/ +public class SpanFlagsTest { + + @Test + void withParentIsRemoteFlags() { + assertThat(SpanFlags.withParentIsRemoteFlags(TraceFlags.fromByte((byte) 0xff), false)) + .isEqualTo(0x1ff); + assertThat(SpanFlags.withParentIsRemoteFlags(TraceFlags.fromByte((byte) 0x01), false)) + .isEqualTo(0x101); + assertThat(SpanFlags.withParentIsRemoteFlags(TraceFlags.fromByte((byte) 0x05), false)) + .isEqualTo(0x105); + assertThat(SpanFlags.withParentIsRemoteFlags(TraceFlags.fromByte((byte) 0x00), false)) + .isEqualTo(0x100); + + assertThat(SpanFlags.withParentIsRemoteFlags(TraceFlags.fromByte((byte) 0xff), true)) + .isEqualTo(0x3ff); + assertThat(SpanFlags.withParentIsRemoteFlags(TraceFlags.fromByte((byte) 0x01), true)) + .isEqualTo(0x301); + assertThat(SpanFlags.withParentIsRemoteFlags(TraceFlags.fromByte((byte) 0x05), true)) + .isEqualTo(0x305); + assertThat(SpanFlags.withParentIsRemoteFlags(TraceFlags.fromByte((byte) 0x00), true)) + .isEqualTo(0x300); + } + + @Test + void getTraceFlags() { + assertThat(SpanFlags.getTraceFlags(0x1ff)).isEqualTo(TraceFlags.fromByte((byte) 0xff)); + assertThat(SpanFlags.getTraceFlags(0xffffffff)).isEqualTo(TraceFlags.fromByte((byte) 0xff)); + assertThat(SpanFlags.getTraceFlags(0x000000ff)).isEqualTo(TraceFlags.fromByte((byte) 0xff)); + + assertThat(SpanFlags.getTraceFlags(0x100)).isEqualTo(TraceFlags.fromByte((byte) 0x00)); + assertThat(SpanFlags.getTraceFlags(0xffffff00)).isEqualTo(TraceFlags.fromByte((byte) 0x00)); + assertThat(SpanFlags.getTraceFlags(0x00000000)).isEqualTo(TraceFlags.fromByte((byte) 0x00)); + + assertThat(SpanFlags.getTraceFlags(0x101)).isEqualTo(TraceFlags.fromByte((byte) 0x01)); + assertThat(SpanFlags.getTraceFlags(0xffffff01)).isEqualTo(TraceFlags.fromByte((byte) 0x01)); + assertThat(SpanFlags.getTraceFlags(0x00000001)).isEqualTo(TraceFlags.fromByte((byte) 0x01)); + } + + @Test + void isKnownWhetherParentIsRemote() { + assertThat(SpanFlags.isKnownWhetherParentIsRemote(SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)) + .isTrue(); + assertThat( + SpanFlags.isKnownWhetherParentIsRemote( + 0x00000001 | SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)) + .isTrue(); + assertThat( + SpanFlags.isKnownWhetherParentIsRemote( + 0x10000000 | SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)) + .isTrue(); + assertThat( + SpanFlags.isKnownWhetherParentIsRemote( + 0x00000200 | SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)) + .isTrue(); + assertThat(SpanFlags.isKnownWhetherParentIsRemote(SpanFlags.CONTEXT_IS_REMOTE_MASK)).isTrue(); + assertThat(SpanFlags.isKnownWhetherParentIsRemote(0xffffffff)).isTrue(); + + assertThat(SpanFlags.isKnownWhetherParentIsRemote(~SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)) + .isFalse(); + assertThat( + SpanFlags.isKnownWhetherParentIsRemote( + 0x00000001 & ~SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)) + .isFalse(); + assertThat( + SpanFlags.isKnownWhetherParentIsRemote( + 0x10000000 & ~SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)) + .isFalse(); + assertThat( + SpanFlags.isKnownWhetherParentIsRemote( + 0x00000200 & ~SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)) + .isFalse(); + assertThat(SpanFlags.isKnownWhetherParentIsRemote(0x00000000)).isFalse(); + } + + @Test + void isParentRemote() { + assertThat( + SpanFlags.isParentRemote( + SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT | SpanFlags.CONTEXT_IS_REMOTE_BIT)) + .isTrue(); + assertThat( + SpanFlags.isParentRemote( + 0x00000001 | SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT | SpanFlags.CONTEXT_IS_REMOTE_BIT)) + .isTrue(); + assertThat( + SpanFlags.isParentRemote( + 0x10000000 | SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT | SpanFlags.CONTEXT_IS_REMOTE_BIT)) + .isTrue(); + 
assertThat( + SpanFlags.isParentRemote( + 0x00000200 | SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT | SpanFlags.CONTEXT_IS_REMOTE_BIT)) + .isTrue(); + assertThat(SpanFlags.isParentRemote(SpanFlags.CONTEXT_IS_REMOTE_MASK)).isTrue(); + assertThat(SpanFlags.isParentRemote(0xffffffff)).isTrue(); + + assertThat(SpanFlags.isParentRemote(SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)).isFalse(); + assertThat(SpanFlags.isParentRemote(~SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)).isFalse(); + assertThat(SpanFlags.isParentRemote(SpanFlags.CONTEXT_IS_REMOTE_BIT)).isFalse(); + assertThat(SpanFlags.isParentRemote(~SpanFlags.CONTEXT_IS_REMOTE_BIT)).isFalse(); + assertThat( + SpanFlags.isParentRemote( + ~SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT & ~SpanFlags.CONTEXT_IS_REMOTE_BIT)) + .isFalse(); + assertThat(SpanFlags.isParentRemote(0x00000200 & ~SpanFlags.CONTEXT_HAS_IS_REMOTE_BIT)) + .isFalse(); + assertThat(SpanFlags.isParentRemote(0x00000000)).isFalse(); + } +} diff --git a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/TraceRequestMarshalerTest.java b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/TraceRequestMarshalerTest.java index c9e9b1d3fc2..9e00b634783 100644 --- a/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/TraceRequestMarshalerTest.java +++ b/exporters/otlp/common/src/test/java/io/opentelemetry/exporter/internal/otlp/traces/TraceRequestMarshalerTest.java @@ -27,6 +27,9 @@ import io.opentelemetry.api.trace.TraceId; import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerContext; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.marshal.StatelessMarshaler; import io.opentelemetry.proto.common.v1.AnyValue; import io.opentelemetry.proto.common.v1.ArrayValue; import io.opentelemetry.proto.common.v1.InstrumentationScope; @@ -40,6 +43,7 @@ import io.opentelemetry.sdk.testing.trace.TestSpanData; import io.opentelemetry.sdk.trace.data.EventData; import io.opentelemetry.sdk.trace.data.LinkData; +import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.data.StatusData; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -49,6 +53,8 @@ import java.util.Collections; import java.util.Locale; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; class TraceRequestMarshalerTest { @@ -56,7 +62,9 @@ class TraceRequestMarshalerTest { new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4}; private static final String TRACE_ID = TraceId.fromBytes(TRACE_ID_BYTES); private static final byte[] SPAN_ID_BYTES = new byte[] {0, 0, 0, 0, 4, 3, 2, 1}; + private static final byte[] PARENT_SPAN_ID_BYTES = new byte[] {0, 0, 0, 0, 5, 6, 7, 8}; private static final String SPAN_ID = SpanId.fromBytes(SPAN_ID_BYTES); + private static final String PARENT_SPAN_ID = SpanId.fromBytes(PARENT_SPAN_ID_BYTES); private static final String TRACE_STATE_VALUE = "baz=qux,foo=bar"; private static final SpanContext SPAN_CONTEXT = SpanContext.create( @@ -65,6 +73,10 @@ class TraceRequestMarshalerTest { TraceFlags.getSampled(), TraceState.builder().put("foo", "bar").put("baz", "qux").build()); + private static final SpanContext PARENT_SPAN_CONTEXT = + SpanContext.createFromRemoteParent( + TRACE_ID, PARENT_SPAN_ID, TraceFlags.getSampled(), TraceState.builder().build()); + 
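For readers following the low-allocation path, the reuse pattern exercised by LowAllocationTraceRequestMarshalerTest above looks roughly like the sketch below. Only initialize(), getBinarySerializedSize() and writeBinaryTo() are taken from that test; the wrapper class, the serialize() method and the send-side handling of the bytes are illustrative assumptions.

import io.opentelemetry.exporter.internal.otlp.traces.LowAllocationTraceRequestMarshaler;
import io.opentelemetry.sdk.trace.data.SpanData;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;

class LowAllocationExportSketch {
  // One marshaler instance is kept and re-initialized per batch instead of building a new
  // TraceRequestMarshaler object tree for every export.
  private final LowAllocationTraceRequestMarshaler marshaler =
      new LowAllocationTraceRequestMarshaler();

  byte[] serialize(List<SpanData> batch) throws IOException {
    marshaler.initialize(batch); // bind this batch; serialized sizes are computed here
    ByteArrayOutputStream out = new ByteArrayOutputStream(marshaler.getBinarySerializedSize());
    marshaler.writeBinaryTo(out);
    // The marshaler may require an explicit reset between batches; that step is not shown in
    // the tests above, so it is omitted here.
    return out.toByteArray();
  }
}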
@Test void toProtoResourceSpans() { ResourceSpansMarshaler[] resourceSpansMarshalers = @@ -110,12 +122,13 @@ void toProtoResourceSpans() { .build()); } - @Test - void toProtoSpan() { - Span span = + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoSpan(MarshalerSource marshalerSource) { + Span protoSpan = parse( Span.getDefaultInstance(), - SpanMarshaler.create( + marshalerSource.create( TestSpanData.builder() .setHasEnded(true) .setSpanContext(SPAN_CONTEXT) @@ -145,15 +158,19 @@ void toProtoSpan() { .setStatus(StatusData.ok()) .build())); - assertThat(span.getTraceId().toByteArray()).isEqualTo(TRACE_ID_BYTES); - assertThat(span.getSpanId().toByteArray()).isEqualTo(SPAN_ID_BYTES); - assertThat(span.getTraceState()).isEqualTo(TRACE_STATE_VALUE); - assertThat(span.getParentSpanId().toByteArray()).isEqualTo(new byte[] {}); - assertThat(span.getName()).isEqualTo("GET /api/endpoint"); - assertThat(span.getKind()).isEqualTo(SPAN_KIND_SERVER); - assertThat(span.getStartTimeUnixNano()).isEqualTo(12345); - assertThat(span.getEndTimeUnixNano()).isEqualTo(12349); - assertThat(span.getAttributesList()) + assertThat(protoSpan.getTraceId().toByteArray()).isEqualTo(TRACE_ID_BYTES); + assertThat(protoSpan.getSpanId().toByteArray()).isEqualTo(SPAN_ID_BYTES); + assertThat(protoSpan.getFlags() & 0xff) + .isEqualTo((SPAN_CONTEXT.getTraceFlags().asByte() & 0xff)); + assertThat(SpanFlags.isKnownWhetherParentIsRemote(protoSpan.getFlags())).isTrue(); + assertThat(SpanFlags.isParentRemote(protoSpan.getFlags())).isFalse(); + assertThat(protoSpan.getTraceState()).isEqualTo(TRACE_STATE_VALUE); + assertThat(protoSpan.getParentSpanId().toByteArray()).isEqualTo(new byte[] {}); + assertThat(protoSpan.getName()).isEqualTo("GET /api/endpoint"); + assertThat(protoSpan.getKind()).isEqualTo(SPAN_KIND_SERVER); + assertThat(protoSpan.getStartTimeUnixNano()).isEqualTo(12345); + assertThat(protoSpan.getEndTimeUnixNano()).isEqualTo(12349); + assertThat(protoSpan.getAttributesList()) .containsOnly( KeyValue.newBuilder() .setKey("key") @@ -215,20 +232,58 @@ void toProtoSpan() { .build()) .build()) .build()); - assertThat(span.getDroppedAttributesCount()).isEqualTo(1); - assertThat(span.getEventsList()) + assertThat(protoSpan.getDroppedAttributesCount()).isEqualTo(1); + assertThat(protoSpan.getEventsList()) .containsExactly( Span.Event.newBuilder().setTimeUnixNano(12347).setName("my_event").build()); - assertThat(span.getDroppedEventsCount()).isEqualTo(2); // 3 - 1 - assertThat(span.getLinksList()) + assertThat(protoSpan.getDroppedEventsCount()).isEqualTo(2); // 3 - 1 + assertThat(protoSpan.getLinksList()) .containsExactly( Span.Link.newBuilder() .setTraceId(ByteString.copyFrom(TRACE_ID_BYTES)) .setSpanId(ByteString.copyFrom(SPAN_ID_BYTES)) + .setFlags( + (SPAN_CONTEXT.getTraceFlags().asByte() & 0xff) + | SpanFlags.getHasParentIsRemoteMask()) .setTraceState(encodeTraceState(SPAN_CONTEXT.getTraceState())) .build()); - assertThat(span.getDroppedLinksCount()).isEqualTo(1); // 2 - 1 - assertThat(span.getStatus()).isEqualTo(Status.newBuilder().setCode(STATUS_CODE_OK).build()); + assertThat(protoSpan.getDroppedLinksCount()).isEqualTo(1); // 2 - 1 + assertThat(protoSpan.getStatus()) + .isEqualTo(Status.newBuilder().setCode(STATUS_CODE_OK).build()); + } + + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoSpan_withRemoteParent(MarshalerSource marshalerSource) { + Span protoSpan = + parse( + Span.getDefaultInstance(), + marshalerSource.create( + TestSpanData.builder() + .setHasEnded(true) + 
.setSpanContext(SPAN_CONTEXT) + .setParentSpanContext(PARENT_SPAN_CONTEXT) + .setName("GET /api/endpoint") + .setKind(SpanKind.SERVER) + .setStartEpochNanos(12345) + .setEndEpochNanos(12349) + .setStatus(StatusData.ok()) + .build())); + + assertThat(protoSpan.getTraceId().toByteArray()).isEqualTo(TRACE_ID_BYTES); + assertThat(protoSpan.getSpanId().toByteArray()).isEqualTo(SPAN_ID_BYTES); + assertThat(protoSpan.getFlags() & 0xff) + .isEqualTo((SPAN_CONTEXT.getTraceFlags().asByte() & 0xff)); + assertThat(SpanFlags.isKnownWhetherParentIsRemote(protoSpan.getFlags())).isTrue(); + assertThat(SpanFlags.isParentRemote(protoSpan.getFlags())).isTrue(); + assertThat(protoSpan.getTraceState()).isEqualTo(TRACE_STATE_VALUE); + assertThat(protoSpan.getParentSpanId().toByteArray()).isEqualTo(PARENT_SPAN_ID_BYTES); + assertThat(protoSpan.getName()).isEqualTo("GET /api/endpoint"); + assertThat(protoSpan.getKind()).isEqualTo(SPAN_KIND_SERVER); + assertThat(protoSpan.getStartTimeUnixNano()).isEqualTo(12345); + assertThat(protoSpan.getEndTimeUnixNano()).isEqualTo(12349); + assertThat(protoSpan.getStatus()) + .isEqualTo(Status.newBuilder().setCode(STATUS_CODE_OK).build()); } @Test @@ -245,9 +300,10 @@ void toProtoSpanKind() { .isEqualTo(io.opentelemetry.proto.trace.v1.internal.Span.SpanKind.SPAN_KIND_CONSUMER); } - @Test - void toProtoStatus() { - assertThat(parse(Status.getDefaultInstance(), SpanStatusMarshaler.create(StatusData.unset()))) + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoStatus(MarshalerSource marshalerSource) { + assertThat(parse(Status.getDefaultInstance(), marshalerSource.create(StatusData.unset()))) .isEqualTo(Status.newBuilder().setCode(STATUS_CODE_UNSET).build()); assertThat( parse( @@ -266,12 +322,13 @@ void toProtoStatus() { .isEqualTo(Status.newBuilder().setCode(STATUS_CODE_OK).setMessage("OK_OVERRIDE").build()); } - @Test - void toProtoSpanEvent_WithoutAttributes() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoSpanEvent_WithoutAttributes(MarshalerSource marshalerSource) { assertThat( parse( Span.Event.getDefaultInstance(), - SpanEventMarshaler.create( + marshalerSource.create( EventData.create(12345, "test_without_attributes", Attributes.empty())))) .isEqualTo( Span.Event.newBuilder() @@ -280,12 +337,13 @@ void toProtoSpanEvent_WithoutAttributes() { .build()); } - @Test - void toProtoSpanEvent_WithAttributes() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoSpanEvent_WithAttributes(MarshalerSource marshalerSource) { assertThat( parse( Span.Event.getDefaultInstance(), - SpanEventMarshaler.create( + marshalerSource.create( EventData.create( 12345, "test_with_attributes", @@ -304,32 +362,57 @@ void toProtoSpanEvent_WithAttributes() { .build()); } - @Test - void toProtoSpanLink_WithoutAttributes() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoSpanLink_WithoutAttributes(MarshalerSource marshalerSource) { assertThat( parse( Span.Link.getDefaultInstance(), - SpanLinkMarshaler.create(LinkData.create(SPAN_CONTEXT)))) + marshalerSource.create(LinkData.create(SPAN_CONTEXT)))) .isEqualTo( Span.Link.newBuilder() .setTraceId(ByteString.copyFrom(TRACE_ID_BYTES)) .setSpanId(ByteString.copyFrom(SPAN_ID_BYTES)) + .setFlags( + (SPAN_CONTEXT.getTraceFlags().asByte() & 0xff) + | SpanFlags.getHasParentIsRemoteMask()) .setTraceState(TRACE_STATE_VALUE) .build()); } - @Test - void toProtoSpanLink_WithAttributes() { + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void 
toProtoSpanLink_WithRemoteContext(MarshalerSource marshalerSource) { assertThat( parse( Span.Link.getDefaultInstance(), - SpanLinkMarshaler.create( + marshalerSource.create(LinkData.create(PARENT_SPAN_CONTEXT)))) + .isEqualTo( + Span.Link.newBuilder() + .setTraceId(ByteString.copyFrom(TRACE_ID_BYTES)) + .setSpanId(ByteString.copyFrom(PARENT_SPAN_ID_BYTES)) + .setFlags( + (SPAN_CONTEXT.getTraceFlags().asByte() & 0xff) + | SpanFlags.getParentIsRemoteMask()) + .build()); + } + + @ParameterizedTest + @EnumSource(MarshalerSource.class) + void toProtoSpanLink_WithAttributes(MarshalerSource marshalerSource) { + assertThat( + parse( + Span.Link.getDefaultInstance(), + marshalerSource.create( LinkData.create( SPAN_CONTEXT, Attributes.of(stringKey("key_string"), "string"), 5)))) .isEqualTo( Span.Link.newBuilder() .setTraceId(ByteString.copyFrom(TRACE_ID_BYTES)) .setSpanId(ByteString.copyFrom(SPAN_ID_BYTES)) + .setFlags( + (SPAN_CONTEXT.getTraceFlags().asByte() & 0xff) + | SpanFlags.getHasParentIsRemoteMask()) .setTraceState(TRACE_STATE_VALUE) .addAttributes( KeyValue.newBuilder() @@ -425,7 +508,6 @@ private static byte[] toByteArray(Marshaler marshaler) { } private static String toJson(Marshaler marshaler) { - ByteArrayOutputStream bos = new ByteArrayOutputStream(); try { marshaler.writeJsonTo(bos); @@ -434,4 +516,75 @@ private static String toJson(Marshaler marshaler) { } return new String(bos.toByteArray(), StandardCharsets.UTF_8); } + + private static Marshaler createMarshaler(StatelessMarshaler marshaler, T data) { + return new Marshaler() { + private final MarshalerContext context = new MarshalerContext(); + private final int size = marshaler.getBinarySerializedSize(data, context); + + @Override + public int getBinarySerializedSize() { + return size; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + context.resetReadIndex(); + marshaler.writeTo(output, data, context); + } + }; + } + + private enum MarshalerSource { + STATEFUL_MARSHALER { + @Override + Marshaler create(SpanData spanData) { + return SpanMarshaler.create(spanData); + } + + @Override + Marshaler create(StatusData statusData) { + return SpanStatusMarshaler.create(statusData); + } + + @Override + Marshaler create(EventData eventData) { + return SpanEventMarshaler.create(eventData); + } + + @Override + Marshaler create(LinkData linkData) { + return SpanLinkMarshaler.create(linkData); + } + }, + STATELESS_MARSHALER { + @Override + Marshaler create(SpanData spanData) { + return createMarshaler(SpanStatelessMarshaler.INSTANCE, spanData); + } + + @Override + Marshaler create(StatusData statusData) { + return createMarshaler(SpanStatusStatelessMarshaler.INSTANCE, statusData); + } + + @Override + Marshaler create(EventData eventData) { + return createMarshaler(SpanEventStatelessMarshaler.INSTANCE, eventData); + } + + @Override + Marshaler create(LinkData linkData) { + return createMarshaler(SpanLinkStatelessMarshaler.INSTANCE, linkData); + } + }; + + abstract Marshaler create(SpanData spanData); + + abstract Marshaler create(StatusData statusData); + + abstract Marshaler create(EventData eventData); + + abstract Marshaler create(LinkData linkData); + } } diff --git a/exporters/otlp/profiles/build.gradle.kts b/exporters/otlp/profiles/build.gradle.kts new file mode 100644 index 00000000000..51979a5c8c6 --- /dev/null +++ b/exporters/otlp/profiles/build.gradle.kts @@ -0,0 +1,24 @@ +plugins { + id("otel.java-conventions") + // TODO (jack-berg): uncomment when ready to publish + // id("otel.publish-conventions") 
+ + id("otel.animalsniffer-conventions") +} + +description = "OpenTelemetry - Profiles Exporter" +otelJava.moduleName.set("io.opentelemetry.exporter.otlp.profiles") + +val versions: Map by project +dependencies { + api(project(":sdk:common")) + api(project(":exporters:common")) + implementation(project(":exporters:otlp:common")) + + annotationProcessor("com.google.auto.value:auto-value") + + testCompileOnly("com.google.guava:guava") + testImplementation("com.fasterxml.jackson.core:jackson-databind") + testImplementation("com.google.protobuf:protobuf-java-util") + testImplementation("io.opentelemetry.proto:opentelemetry-proto") +} diff --git a/extensions/incubator/gradle.properties b/exporters/otlp/profiles/gradle.properties similarity index 100% rename from extensions/incubator/gradle.properties rename to exporters/otlp/profiles/gradle.properties diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableAttributeUnitData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableAttributeUnitData.java new file mode 100644 index 00000000000..ee3e54aa177 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableAttributeUnitData.java @@ -0,0 +1,33 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.otlp.profiles.AttributeUnitData; +import javax.annotation.concurrent.Immutable; + +/** + * Auto value implementation of {@link AttributeUnitData}, which represents a mapping between + * Attribute Keys and Units. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@Immutable +@AutoValue +public abstract class ImmutableAttributeUnitData implements AttributeUnitData { + + /** + * Returns a new AttributeUnitData mapping the given key to the given unit. + * + * @return a new AttributeUnitData mapping the given key to the given unit. + */ + public static AttributeUnitData create(int attributeKeyStringIndex, int unitStringIndex) { + return new AutoValue_ImmutableAttributeUnitData(attributeKeyStringIndex, unitStringIndex); + } + + ImmutableAttributeUnitData() {} +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableFunctionData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableFunctionData.java new file mode 100644 index 00000000000..cd0c12e901c --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableFunctionData.java @@ -0,0 +1,34 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.otlp.profiles.FunctionData; +import javax.annotation.concurrent.Immutable; + +/** + * Auto value implementation of {@link FunctionData}, which describes a code function. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@Immutable +@AutoValue +public abstract class ImmutableFunctionData implements FunctionData { + + /** + * Returns a new FunctionData describing the given function characteristics. + * + * @return a new FunctionData describing the given function characteristics. + */ + public static FunctionData create( + int nameStringIndex, int systemNameStringIndex, int filenameStringIndex, long startLine) { + return new AutoValue_ImmutableFunctionData( + nameStringIndex, systemNameStringIndex, filenameStringIndex, startLine); + } + + ImmutableFunctionData() {} +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableLineData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableLineData.java new file mode 100644 index 00000000000..e0eebeaff60 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableLineData.java @@ -0,0 +1,33 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.otlp.profiles.LineData; +import javax.annotation.concurrent.Immutable; + +/** + * Auto value implementation of {@link LineData}, which details a specific line in a source code, + * linked to a function. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@Immutable +@AutoValue +public abstract class ImmutableLineData implements LineData { + + /** + * Returns a new LineData describing the given details a specific line in a source code. + * + * @return a new LineData describing the given details a specific line in a source code. + */ + public static LineData create(int functionIndex, long line, long column) { + return new AutoValue_ImmutableLineData(functionIndex, line, column); + } + + ImmutableLineData() {} +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableLinkData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableLinkData.java new file mode 100644 index 00000000000..1345503b405 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableLinkData.java @@ -0,0 +1,33 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.otlp.profiles.LinkData; +import javax.annotation.concurrent.Immutable; + +/** + * Auto value implementation of {@link LinkData}, which represents a connection from a profile + * Sample to a trace Span. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@Immutable +@AutoValue +public abstract class ImmutableLinkData implements LinkData { + + /** + * Returns a new LinkData representing an association to the given trace span. + * + * @return a new LinkData representing an association to the given trace span. + */ + public static LinkData create(String traceId, String spanId) { + return new AutoValue_ImmutableLinkData(traceId, spanId); + } + + ImmutableLinkData() {} +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableLocationData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableLocationData.java new file mode 100644 index 00000000000..56792a30478 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableLocationData.java @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.otlp.profiles.LineData; +import io.opentelemetry.exporter.otlp.profiles.LocationData; +import java.util.List; +import javax.annotation.concurrent.Immutable; + +/** + * Auto value implementation of {@link LocationData}, which describes function and line table debug + * information. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@Immutable +@AutoValue +public abstract class ImmutableLocationData implements LocationData { + + /** + * Returns a new LocationData describing the given function and line table information. + * + * @return a new LocationData describing the given function and line table information. + */ + public static LocationData create( + Integer mappingIndex, + long address, + List lines, + boolean folded, + List attributeIndices) { + return new AutoValue_ImmutableLocationData( + mappingIndex, address, lines, folded, attributeIndices); + } + + ImmutableLocationData() {} +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableMappingData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableMappingData.java new file mode 100644 index 00000000000..a1f4c07843c --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableMappingData.java @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.otlp.profiles.MappingData; +import java.util.List; +import javax.annotation.concurrent.Immutable; + +/** + * Auto value implementation of {@link MappingData}, which describes the mapping of a binary in + * memory. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@Immutable +@AutoValue +public abstract class ImmutableMappingData implements MappingData { + + /** + * Returns a new MappingData describing the given mapping of a binary in memory. + * + * @return a new MappingData describing the given mapping of a binary in memory. + */ + @SuppressWarnings("TooManyParameters") + public static MappingData create( + long memoryStart, + long memoryLimit, + long fileOffset, + int filenameStringIndex, + List attributeIndices, + boolean hasFunctions, + boolean hasFilenames, + boolean hasLineNumbers, + boolean hasInlineFrames) { + return new AutoValue_ImmutableMappingData( + memoryStart, + memoryLimit, + fileOffset, + filenameStringIndex, + attributeIndices, + hasFunctions, + hasFilenames, + hasLineNumbers, + hasInlineFrames); + } + + ImmutableMappingData() {} +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableProfileData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableProfileData.java new file mode 100644 index 00000000000..520139b2452 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableProfileData.java @@ -0,0 +1,93 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValue; +import io.opentelemetry.exporter.otlp.profiles.AttributeUnitData; +import io.opentelemetry.exporter.otlp.profiles.FunctionData; +import io.opentelemetry.exporter.otlp.profiles.LinkData; +import io.opentelemetry.exporter.otlp.profiles.LocationData; +import io.opentelemetry.exporter.otlp.profiles.MappingData; +import io.opentelemetry.exporter.otlp.profiles.ProfileData; +import io.opentelemetry.exporter.otlp.profiles.SampleData; +import io.opentelemetry.exporter.otlp.profiles.ValueTypeData; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import java.nio.ByteBuffer; +import java.util.List; +import javax.annotation.concurrent.Immutable; + +/** + * Auto value implementation of {@link ProfileData}, which represents a complete profile, including + * sample types, samples, mappings to binaries, locations, functions, string table, and additional + * metadata. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@Immutable +@AutoValue +public abstract class ImmutableProfileData implements ProfileData { + + /** + * Returns a new ProfileData representing the given data. + * + * @return a new ProfileData representing the given data. + */ + @SuppressWarnings("TooManyParameters") + public static ProfileData create( + Resource resource, + InstrumentationScopeInfo instrumentationScopeInfo, + List sampleTypes, + List samples, + List mappingTable, + List locationTable, + List locationIndices, + List functionTable, + List> attributeTable, + List attributeUnits, + List linkTable, + List stringTable, + long timeNanos, + long durationNanos, + ValueTypeData periodType, + long period, + List commentStrIndices, + int defaultSampleTypeStringIndex, + String profileId, + List attributeIndices, + int droppedAttributesCount, + String originalPayloadFormat, + ByteBuffer originalPayload) { + return new AutoValue_ImmutableProfileData( + resource, + instrumentationScopeInfo, + sampleTypes, + samples, + mappingTable, + locationTable, + locationIndices, + functionTable, + attributeTable, + attributeUnits, + linkTable, + stringTable, + timeNanos, + durationNanos, + periodType, + period, + commentStrIndices, + defaultSampleTypeStringIndex, + profileId, + attributeIndices, + droppedAttributesCount, + originalPayloadFormat, + originalPayload); + } + + ImmutableProfileData() {} +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableSampleData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableSampleData.java new file mode 100644 index 00000000000..c0b3deac55b --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableSampleData.java @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.otlp.profiles.SampleData; +import java.util.List; +import javax.annotation.concurrent.Immutable; + +/** + * Auto value implementation of {@link SampleData}, which records values encountered in some program + * context. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@Immutable +@AutoValue +public abstract class ImmutableSampleData implements SampleData { + + /** + * Returns a new SampleData representing the given program context. + * + * @return a new SampleData representing the given program context. + */ + public static SampleData create( + int locationsStartIndex, + int locationsLength, + List values, + List attributeIndices, + Integer linkIndex, + List timestamps) { + return new AutoValue_ImmutableSampleData( + locationsStartIndex, locationsLength, values, attributeIndices, linkIndex, timestamps); + } + + ImmutableSampleData() {} +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableValueTypeData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableValueTypeData.java new file mode 100644 index 00000000000..9928403f3a2 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/internal/data/ImmutableValueTypeData.java @@ -0,0 +1,36 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.exporter.otlp.profiles.AggregationTemporality; +import io.opentelemetry.exporter.otlp.profiles.ValueTypeData; +import javax.annotation.concurrent.Immutable; + +/** + * Auto value implementation of {@link ValueTypeData}, which describes the type and units of a + * value. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@Immutable +@AutoValue +public abstract class ImmutableValueTypeData implements ValueTypeData { + + /** + * Returns a new ValueTypeData describing the given type and unit characteristics. + * + * @return a new ValueTypeData describing the given type and unit characteristics. + */ + public static ValueTypeData create( + int typeStringIndex, int unitStringIndex, AggregationTemporality aggregationTemporality) { + return new AutoValue_ImmutableValueTypeData( + typeStringIndex, unitStringIndex, aggregationTemporality); + } + + ImmutableValueTypeData() {} +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/AggregationTemporality.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/AggregationTemporality.java new file mode 100644 index 00000000000..fc3d47196a6 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/AggregationTemporality.java @@ -0,0 +1,39 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +/** + * Specifies the method of aggregating metric values. + * + *

TODO: This is intentionally not the same as metrics/AggregationTemporality. For the profiles.proto + * 'v1development' version, this class is kept distinct from the pre-existing + * AggregationTemporality in metrics.proto. As profiles.proto stabilizes, the two may be refactored + * into a shared version in common.proto. Meanwhile the Java class structure mirrors the .proto structure + * by keeping the entities distinct. + * + *

refs for refactoring discussion: + * + * @see + * "https://github.com/open-telemetry/opentelemetry-proto/blob/v1.3.0/opentelemetry/proto/metrics/v1/metrics.proto#L261" + * @see + * "https://github.com/open-telemetry/opentelemetry-proto/blob/v1.3.0/opentelemetry/proto/profiles/v1development/profiles.proto#L147" + * @see "https://github.com/open-telemetry/opentelemetry-proto/issues/547" + * @see "https://github.com/open-telemetry/opentelemetry-proto/pull/534#discussion_r1552403726" + * @see "profiles.proto::AggregationTemporality" + */ +public enum AggregationTemporality { + + /** + * DELTA is an AggregationTemporality for a profiler which reports changes since last report time. + */ + DELTA, + + /** + * CUMULATIVE is an AggregationTemporality for a profiler which reports changes since a fixed + * start time. + */ + CUMULATIVE +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/AttributeUnitData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/AttributeUnitData.java new file mode 100644 index 00000000000..1b5bbd54c6c --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/AttributeUnitData.java @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import javax.annotation.concurrent.Immutable; + +/** + * Represents a mapping between Attribute Keys and Units. + * + * @see "profiles.proto::AttributeUnit" + */ +@Immutable +public interface AttributeUnitData { + + /** Index into string table. */ + int getAttributeKeyStringIndex(); + + /** Index into string table. */ + int getUnitIndexStringIndex(); +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/AttributeUnitMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/AttributeUnitMarshaler.java new file mode 100644 index 00000000000..311a0d1c33d --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/AttributeUnitMarshaler.java @@ -0,0 +1,67 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.profiles.v1development.internal.AttributeUnit; +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; + +final class AttributeUnitMarshaler extends MarshalerWithSize { + + private static final AttributeUnitMarshaler[] EMPTY_REPEATED = new AttributeUnitMarshaler[0]; + + private final int attributeKeyStringIndex; + private final int unitStringIndex; + + static AttributeUnitMarshaler create(AttributeUnitData attributeUnitData) { + return new AttributeUnitMarshaler( + attributeUnitData.getAttributeKeyStringIndex(), + attributeUnitData.getUnitIndexStringIndex()); + } + + static AttributeUnitMarshaler[] createRepeated(List items) { + if (items.isEmpty()) { + return EMPTY_REPEATED; + } + + AttributeUnitMarshaler[] attributeUnitMarshalers = new AttributeUnitMarshaler[items.size()]; + items.forEach( + item -> + new Consumer() { + int index = 0; + + @Override + public void accept(AttributeUnitData attributeUnitData) { + attributeUnitMarshalers[index++] = 
AttributeUnitMarshaler.create(attributeUnitData); + } + }); + return attributeUnitMarshalers; + } + + private AttributeUnitMarshaler(int attributeKeyStringIndex, int unitStringIndex) { + super(calculateSize(attributeKeyStringIndex, unitStringIndex)); + this.attributeKeyStringIndex = attributeKeyStringIndex; + this.unitStringIndex = unitStringIndex; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + output.serializeInt32(AttributeUnit.ATTRIBUTE_KEY_STRINDEX, attributeKeyStringIndex); + output.serializeInt32(AttributeUnit.UNIT_STRINDEX, unitStringIndex); + } + + private static int calculateSize(int attributeKeyStringIndex, int unitStringIndex) { + int size; + size = 0; + size += MarshalerUtil.sizeInt32(AttributeUnit.ATTRIBUTE_KEY_STRINDEX, attributeKeyStringIndex); + size += MarshalerUtil.sizeInt32(AttributeUnit.UNIT_STRINDEX, unitStringIndex); + return size; + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/FunctionData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/FunctionData.java new file mode 100644 index 00000000000..e50ba846907 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/FunctionData.java @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import javax.annotation.concurrent.Immutable; + +/** + * Describes a function. + * + * @see "profiles.proto::Function" + */ +@Immutable +public interface FunctionData { + + /** Name of the function, in human-readable form if available. Index into string table. */ + int getNameStringIndex(); + + /** + * Name of the function, as identified by the system. For instance, it can be a C++ mangled name. + * Index into string table. + */ + int getSystemNameStringIndex(); + + /** Source file containing the function. Index into string table. */ + int getFilenameStringIndex(); + + /** Line number in source file. 
*/ + long getStartLine(); +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/FunctionMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/FunctionMarshaler.java new file mode 100644 index 00000000000..aea4071d380 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/FunctionMarshaler.java @@ -0,0 +1,78 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.profiles.v1development.internal.Function; +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; + +final class FunctionMarshaler extends MarshalerWithSize { + + private static final FunctionMarshaler[] EMPTY_REPEATED = new FunctionMarshaler[0]; + + private final int nameStringIndex; + private final int systemNameStringIndex; + private final int filenameStringIndex; + private final long startLine; + + static FunctionMarshaler create(FunctionData functionData) { + return new FunctionMarshaler( + functionData.getNameStringIndex(), + functionData.getSystemNameStringIndex(), + functionData.getFilenameStringIndex(), + functionData.getStartLine()); + } + + static FunctionMarshaler[] createRepeated(List items) { + if (items.isEmpty()) { + return EMPTY_REPEATED; + } + + FunctionMarshaler[] functionMarshalers = new FunctionMarshaler[items.size()]; + items.forEach( + item -> + new Consumer() { + int index = 0; + + @Override + public void accept(FunctionData functionData) { + functionMarshalers[index++] = FunctionMarshaler.create(functionData); + } + }); + return functionMarshalers; + } + + private FunctionMarshaler( + int nameStringIndex, int systemNameStringIndex, int filenameStringIndex, long startLine) { + super(calculateSize(nameStringIndex, systemNameStringIndex, filenameStringIndex, startLine)); + this.nameStringIndex = nameStringIndex; + this.systemNameStringIndex = systemNameStringIndex; + this.filenameStringIndex = filenameStringIndex; + this.startLine = startLine; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + output.serializeInt32(Function.NAME_STRINDEX, nameStringIndex); + output.serializeInt32(Function.SYSTEM_NAME_STRINDEX, systemNameStringIndex); + output.serializeInt32(Function.FILENAME_STRINDEX, filenameStringIndex); + output.serializeInt64(Function.START_LINE, startLine); + } + + private static int calculateSize( + int nameStringIndex, int systemNameStringIndex, int filenameStringIndex, long startLine) { + int size = 0; + size += MarshalerUtil.sizeInt32(Function.NAME_STRINDEX, nameStringIndex); + size += MarshalerUtil.sizeInt32(Function.SYSTEM_NAME_STRINDEX, systemNameStringIndex); + size += MarshalerUtil.sizeInt32(Function.FILENAME_STRINDEX, filenameStringIndex); + size += MarshalerUtil.sizeInt64(Function.START_LINE, startLine); + return size; + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/InstrumentationScopeProfilesMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/InstrumentationScopeProfilesMarshaler.java new file mode 100644 index 00000000000..8f7e2edea44 --- /dev/null +++ 
b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/InstrumentationScopeProfilesMarshaler.java @@ -0,0 +1,49 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.otlp.InstrumentationScopeMarshaler; +import io.opentelemetry.proto.profiles.v1development.internal.ScopeProfiles; +import java.io.IOException; +import java.util.List; + +final class InstrumentationScopeProfilesMarshaler extends MarshalerWithSize { + + private final InstrumentationScopeMarshaler instrumentationScope; + private final List profileMarshalers; + private final byte[] schemaUrlUtf8; + + InstrumentationScopeProfilesMarshaler( + InstrumentationScopeMarshaler instrumentationScope, + byte[] schemaUrlUtf8, + List profileMarshalers) { + super(calculateSize(instrumentationScope, schemaUrlUtf8, profileMarshalers)); + this.instrumentationScope = instrumentationScope; + this.schemaUrlUtf8 = schemaUrlUtf8; + this.profileMarshalers = profileMarshalers; + } + + @Override + public void writeTo(Serializer output) throws IOException { + output.serializeMessage(ScopeProfiles.SCOPE, instrumentationScope); + output.serializeRepeatedMessage(ScopeProfiles.PROFILES, profileMarshalers); + output.serializeString(ScopeProfiles.SCHEMA_URL, schemaUrlUtf8); + } + + private static int calculateSize( + InstrumentationScopeMarshaler instrumentationScope, + byte[] schemaUrlUtf8, + List profileMarshalers) { + int size = 0; + size += MarshalerUtil.sizeMessage(ScopeProfiles.SCOPE, instrumentationScope); + size += MarshalerUtil.sizeRepeatedMessage(ScopeProfiles.PROFILES, profileMarshalers); + size += MarshalerUtil.sizeBytes(ScopeProfiles.SCHEMA_URL, schemaUrlUtf8); + return size; + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LineData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LineData.java new file mode 100644 index 00000000000..7f5c4738ceb --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LineData.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import javax.annotation.concurrent.Immutable; + +/** + * Details a specific line in a source code, linked to a function. + * + * @see "profiles.proto::Line" + */ +@Immutable +public interface LineData { + + /** The index of the corresponding Function for this line. Index into function table. */ + int getFunctionIndex(); + + /** Line number in source code. */ + long getLine(); + + /** Column number in source code. 
*/ + long getColumn(); +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LineMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LineMarshaler.java new file mode 100644 index 00000000000..2d3bf83a6c2 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LineMarshaler.java @@ -0,0 +1,68 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.profiles.v1development.internal.Line; +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; + +final class LineMarshaler extends MarshalerWithSize { + + private static final LineMarshaler[] EMPTY_REPEATED = new LineMarshaler[0]; + + private final int functionIndex; + private final long line; + private final long column; + + static LineMarshaler create(LineData lineData) { + return new LineMarshaler(lineData.getFunctionIndex(), lineData.getLine(), lineData.getColumn()); + } + + static LineMarshaler[] createRepeated(List items) { + if (items.isEmpty()) { + return EMPTY_REPEATED; + } + + LineMarshaler[] lineMarshalers = new LineMarshaler[items.size()]; + items.forEach( + item -> + new Consumer() { + int index = 0; + + @Override + public void accept(LineData lineData) { + lineMarshalers[index++] = LineMarshaler.create(lineData); + } + }); + return lineMarshalers; + } + + private LineMarshaler(int functionIndex, long line, long column) { + super(calculateSize(functionIndex, line, column)); + this.functionIndex = functionIndex; + this.line = line; + this.column = column; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + output.serializeInt32(Line.FUNCTION_INDEX, functionIndex); + output.serializeInt64(Line.LINE, line); + output.serializeInt64(Line.COLUMN, column); + } + + private static int calculateSize(int functionIndex, long line, long column) { + int size = 0; + size += MarshalerUtil.sizeInt32(Line.FUNCTION_INDEX, functionIndex); + size += MarshalerUtil.sizeInt64(Line.LINE, line); + size += MarshalerUtil.sizeInt64(Line.COLUMN, column); + return size; + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LinkData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LinkData.java new file mode 100644 index 00000000000..ea3c1e1d7b8 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LinkData.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import javax.annotation.concurrent.Immutable; + +/** + * A connection from a profile Sample to a trace Span. + * + * @see "profiles.proto::Link" + */ +@Immutable +public interface LinkData { + + /** + * Returns a unique identifier of a trace that this linked span is part of as 32 character + * lowercase hex String. + */ + String getTraceId(); + + /** Returns a unique identifier for the linked span, as 16 character lowercase hex String. 
*/ + String getSpanId(); +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LinkMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LinkMarshaler.java new file mode 100644 index 00000000000..ae41176a820 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LinkMarshaler.java @@ -0,0 +1,72 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.api.internal.OtelEncodingUtils; +import io.opentelemetry.api.trace.SpanId; +import io.opentelemetry.api.trace.TraceId; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.profiles.v1development.internal.Link; +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; + +final class LinkMarshaler extends MarshalerWithSize { + + private static final LinkMarshaler[] EMPTY_REPEATED = new LinkMarshaler[0]; + + private final byte[] traceId; + private final byte[] spanId; + + static LinkMarshaler create(LinkData linkData) { + // in tracing this conversion is handled by utility methods on SpanContext, + // but we don't have a SpanContext here... + byte[] traceId = OtelEncodingUtils.bytesFromBase16(linkData.getTraceId(), TraceId.getLength()); + byte[] spanId = OtelEncodingUtils.bytesFromBase16(linkData.getSpanId(), SpanId.getLength()); + + return new LinkMarshaler(traceId, spanId); + } + + static LinkMarshaler[] createRepeated(List items) { + if (items.isEmpty()) { + return EMPTY_REPEATED; + } + + LinkMarshaler[] linkMarshalers = new LinkMarshaler[items.size()]; + items.forEach( + item -> + new Consumer() { + int index = 0; + + @Override + public void accept(LinkData linkData) { + linkMarshalers[index++] = LinkMarshaler.create(linkData); + } + }); + return linkMarshalers; + } + + private LinkMarshaler(byte[] traceId, byte[] spanId) { + super(calculateSize(traceId, spanId)); + this.traceId = traceId; + this.spanId = spanId; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + output.serializeBytes(Link.TRACE_ID, traceId); + output.serializeBytes(Link.SPAN_ID, spanId); + } + + private static int calculateSize(byte[] traceId, byte[] spanId) { + int size = 0; + size += MarshalerUtil.sizeBytes(Link.TRACE_ID, traceId); + size += MarshalerUtil.sizeBytes(Link.SPAN_ID, spanId); + return size; + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LocationData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LocationData.java new file mode 100644 index 00000000000..78fdfc4cdd6 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LocationData.java @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import java.util.List; +import javax.annotation.concurrent.Immutable; + +/** + * Describes function and line table debug information. + * + * @see "profiles.proto::Location" + */ +@Immutable +public interface LocationData { + + /** + * The index of the corresponding profile.Mapping for this location. 
It can be unset if the + * mapping is unknown or not applicable for this profile type. + */ + Integer getMappingIndex(); + + /** The instruction address for this location, if available. */ + long getAddress(); + + /** + * Multiple line indicates this location has inlined functions, where the last entry represents + * the caller into which the preceding entries were inlined. + */ + List getLines(); + + /** + * Provides an indication that multiple symbols map to this location's address, for example due to + * identical code folding by the linker. + */ + boolean isFolded(); + + /** References to attributes in Profile.attribute_table. */ + List getAttributeIndices(); +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LocationMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LocationMarshaler.java new file mode 100644 index 00000000000..40360c0eafc --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/LocationMarshaler.java @@ -0,0 +1,92 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.profiles.v1development.internal.Location; +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; +import javax.annotation.Nullable; + +final class LocationMarshaler extends MarshalerWithSize { + + private static final LocationMarshaler[] EMPTY_REPEATED = new LocationMarshaler[0]; + + @Nullable private final Integer mappingIndex; + private final long address; + private final LineMarshaler[] lineMarshalers; + private final boolean isFolded; + private final List attributeIndices; + + static LocationMarshaler create(LocationData locationData) { + return new LocationMarshaler( + locationData.getMappingIndex(), + locationData.getAddress(), + LineMarshaler.createRepeated(locationData.getLines()), + locationData.isFolded(), + locationData.getAttributeIndices()); + } + + static LocationMarshaler[] createRepeated(List items) { + if (items.isEmpty()) { + return EMPTY_REPEATED; + } + + LocationMarshaler[] locationMarshalers = new LocationMarshaler[items.size()]; + items.forEach( + item -> + new Consumer() { + int index = 0; + + @Override + public void accept(LocationData locationData) { + locationMarshalers[index++] = LocationMarshaler.create(locationData); + } + }); + return locationMarshalers; + } + + private LocationMarshaler( + @Nullable Integer mappingIndex, + long address, + LineMarshaler[] lineMarshalers, + boolean isFolded, + List attributeIndices) { + super(calculateSize(mappingIndex, address, lineMarshalers, isFolded, attributeIndices)); + this.mappingIndex = mappingIndex; + this.address = address; + this.lineMarshalers = lineMarshalers; + this.isFolded = isFolded; + this.attributeIndices = attributeIndices; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + output.serializeInt32Optional(Location.MAPPING_INDEX, mappingIndex); + output.serializeUInt64(Location.ADDRESS, address); + output.serializeRepeatedMessage(Location.LINE, lineMarshalers); + output.serializeBool(Location.IS_FOLDED, isFolded); + output.serializeRepeatedInt32(Location.ATTRIBUTE_INDICES, attributeIndices); + } + + private static 
int calculateSize( + @Nullable Integer mappingIndex, + long address, + LineMarshaler[] lineMarshalers, + boolean isFolded, + List attributeIndices) { + int size = 0; + size += MarshalerUtil.sizeInt32Optional(Location.MAPPING_INDEX, mappingIndex); + size += MarshalerUtil.sizeUInt64(Location.ADDRESS, address); + size += MarshalerUtil.sizeRepeatedMessage(Location.LINE, lineMarshalers); + size += MarshalerUtil.sizeBool(Location.IS_FOLDED, isFolded); + size += MarshalerUtil.sizeRepeatedInt32(Location.ATTRIBUTE_INDICES, attributeIndices); + return size; + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/MappingData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/MappingData.java new file mode 100644 index 00000000000..1b365bc7ea0 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/MappingData.java @@ -0,0 +1,44 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import java.util.List; +import javax.annotation.concurrent.Immutable; + +/** + * Describes the mapping of a binary in memory. + * + * @see "profiles.proto::Mapping" + */ +@Immutable +public interface MappingData { + + /** Address at which the binary (or DLL) is loaded into memory. */ + long getMemoryStart(); + + /** The limit of the address range occupied by this mapping. */ + long getMemoryLimit(); + + /** Offset in the binary that corresponds to the first mapped address. */ + long getFileOffset(); + + /** + * The object this entry is loaded from. This can be a filename on disk for the main binary and + * shared libraries, or virtual abstraction like "[vdso]". Index into the string table. + */ + int getFilenameStringIndex(); + + /** References to attributes in Profile.attribute_table. 
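+ * For example, {@code [5, 6]} (hypothetical indices, matching the values used in the marshaler
+ * tests) would reference entries of {@link ProfileData#getAttributeTable()}.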
*/ + List getAttributeIndices(); + + boolean hasFunctions(); + + boolean hasFilenames(); + + boolean hasLineNumbers(); + + boolean hasInlineFrames(); +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/MappingMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/MappingMarshaler.java new file mode 100644 index 00000000000..6e39551540f --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/MappingMarshaler.java @@ -0,0 +1,129 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.profiles.v1development.internal.Mapping; +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; + +final class MappingMarshaler extends MarshalerWithSize { + + private static final MappingMarshaler[] EMPTY_REPEATED = new MappingMarshaler[0]; + + private final long memoryStart; + private final long memoryLimit; + private final long fileOffset; + private final int filenameIndex; + private final List attributeIndices; + private final boolean hasFunctions; + private final boolean hasFilenames; + private final boolean hasLineNumbers; + private final boolean hasInlineFrames; + + static MappingMarshaler create(MappingData mappingData) { + return new MappingMarshaler( + mappingData.getMemoryStart(), + mappingData.getMemoryLimit(), + mappingData.getFileOffset(), + mappingData.getFilenameStringIndex(), + mappingData.getAttributeIndices(), + mappingData.hasFunctions(), + mappingData.hasFilenames(), + mappingData.hasLineNumbers(), + mappingData.hasInlineFrames()); + } + + static MappingMarshaler[] createRepeated(List items) { + if (items.isEmpty()) { + return EMPTY_REPEATED; + } + + MappingMarshaler[] mappingMarshalers = new MappingMarshaler[items.size()]; + items.forEach( + item -> + new Consumer() { + int index = 0; + + @Override + public void accept(MappingData mappingData) { + mappingMarshalers[index++] = MappingMarshaler.create(mappingData); + } + }); + return mappingMarshalers; + } + + private MappingMarshaler( + long memoryStart, + long memoryLimit, + long fileOffset, + int filenameIndex, + List attributeIndices, + boolean hasFunctions, + boolean hasFilenames, + boolean hasLineNumbers, + boolean hasInlineFrames) { + super( + calculateSize( + memoryStart, + memoryLimit, + fileOffset, + filenameIndex, + attributeIndices, + hasFunctions, + hasFilenames, + hasLineNumbers, + hasInlineFrames)); + this.memoryStart = memoryStart; + this.memoryLimit = memoryLimit; + this.fileOffset = fileOffset; + this.filenameIndex = filenameIndex; + this.attributeIndices = attributeIndices; + this.hasFunctions = hasFunctions; + this.hasFilenames = hasFilenames; + this.hasLineNumbers = hasLineNumbers; + this.hasInlineFrames = hasInlineFrames; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + output.serializeUInt64(Mapping.MEMORY_START, memoryStart); + output.serializeUInt64(Mapping.MEMORY_LIMIT, memoryLimit); + output.serializeUInt64(Mapping.FILE_OFFSET, fileOffset); + output.serializeInt32(Mapping.FILENAME_STRINDEX, filenameIndex); + output.serializeRepeatedInt32(Mapping.ATTRIBUTE_INDICES, attributeIndices); + 
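+ // The has_* flags below mirror the MappingData.has*() accessors and indicate which kinds of
+ // symbol/debug information are available for this mapping.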
output.serializeBool(Mapping.HAS_FUNCTIONS, hasFunctions); + output.serializeBool(Mapping.HAS_FILENAMES, hasFilenames); + output.serializeBool(Mapping.HAS_LINE_NUMBERS, hasLineNumbers); + output.serializeBool(Mapping.HAS_INLINE_FRAMES, hasInlineFrames); + } + + private static int calculateSize( + long memoryStart, + long memoryLimit, + long fileOffset, + int filenameIndex, + List attributeIndices, + boolean hasFunctions, + boolean hasFilenames, + boolean hasLineNumbers, + boolean hasInlineFrames) { + int size = 0; + size += MarshalerUtil.sizeUInt64(Mapping.MEMORY_START, memoryStart); + size += MarshalerUtil.sizeUInt64(Mapping.MEMORY_LIMIT, memoryLimit); + size += MarshalerUtil.sizeUInt64(Mapping.FILE_OFFSET, fileOffset); + size += MarshalerUtil.sizeInt32(Mapping.FILENAME_STRINDEX, filenameIndex); + size += MarshalerUtil.sizeRepeatedInt32(Mapping.ATTRIBUTE_INDICES, attributeIndices); + size += MarshalerUtil.sizeBool(Mapping.HAS_FUNCTIONS, hasFunctions); + size += MarshalerUtil.sizeBool(Mapping.HAS_FILENAMES, hasFilenames); + size += MarshalerUtil.sizeBool(Mapping.HAS_LINE_NUMBERS, hasLineNumbers); + size += MarshalerUtil.sizeBool(Mapping.HAS_INLINE_FRAMES, hasInlineFrames); + return size; + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ProfileData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ProfileData.java new file mode 100644 index 00000000000..0f6ef8e6ea3 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ProfileData.java @@ -0,0 +1,131 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.api.internal.OtelEncodingUtils; +import io.opentelemetry.exporter.internal.otlp.AttributeKeyValue; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import java.nio.ByteBuffer; +import java.util.List; +import javax.annotation.Nullable; +import javax.annotation.concurrent.Immutable; + +/** + * Represents a complete profile, including sample types, samples, mappings to binaries, locations, + * functions, string table, and additional metadata. + * + * @see "profiles.proto::Profile" + */ +@Immutable +public interface ProfileData { + + /** Returns the resource of this profile. */ + Resource getResource(); + + /** Returns the instrumentation scope that generated this profile. */ + InstrumentationScopeInfo getInstrumentationScopeInfo(); + + /** A description of the samples associated with each Sample.value. */ + List getSampleTypes(); + + /** The set of samples recorded in this profile. */ + List getSamples(); + + /** + * Mapping from address ranges to the image/binary/library mapped into that address range. + * mapping[0] will be the main binary. + */ + List getMappingTable(); + + /** Locations referenced by samples via location_indices. */ + List getLocationTable(); + + /** Array of locations referenced by samples. */ + List getLocationIndices(); + + /** Functions referenced by locations. */ + List getFunctionTable(); + + /** Lookup table for attributes. */ + List> getAttributeTable(); + + /** Represents a mapping between Attribute Keys and Units. */ + List getAttributeUnits(); + + /** Lookup table for links. */ + List getLinkTable(); + + /** + * A common table for strings referenced by various messages. string_table[0] must always be "". 
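+ * <p>Illustrative (hypothetical) contents: a table of {@code ["", "cpu", "nanoseconds"]} lets a
+ * {@link ValueTypeData} refer to its type and unit by index (1 and 2 here) instead of repeating
+ * the strings.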
+ */ + List getStringTable(); + + /** Time of collection (UTC) represented as nanoseconds past the epoch. */ + long getTimeNanos(); + + /** Duration of the profile, if a duration makes sense. */ + long getDurationNanos(); + + /** + * The kind of events between sampled occurrences. e.g [ "cpu","cycles" ] or [ "heap","bytes" ] + */ + ValueTypeData getPeriodType(); + + /** The number of events between sampled occurrences. */ + long getPeriod(); + + /** Free-form text associated with the profile. Indices into string table. */ + List getCommentStrIndices(); + + /** Type of the preferred sample. Index into the string table. */ + int getDefaultSampleTypeStringIndex(); + + /** + * Returns a globally unique identifier for a profile, as 32 character lowercase hex String. An ID + * with all zeroes is considered invalid. This field is required. + */ + String getProfileId(); + + /** + * Returns a globally unique identifier for a profile, as a 16 bytes array. An ID with all zeroes + * is considered invalid. This field is required. + */ + default byte[] getProfileIdBytes() { + return OtelEncodingUtils.bytesFromBase16(getProfileId(), 32); + } + + /** + * Returns indexes of profile-wide attributes, referencing to Profile.attribute_table. Attribute + * keys MUST be unique (it is not allowed to have more than one attribute with the same key). + * + * @see + * "https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/common/README.md#attribute" + */ + List getAttributeIndices(); + + /** + * Returns the total number of attributes that were recorded on this profile. + * + *
<p>
This number may be larger than the number of attributes that are attached to this profile, + * if the total number recorded was greater than the configured maximum value. + */ + int getTotalAttributeCount(); + + /** + * Returns the format of the original payload. Common values are defined in semantic conventions. + * [required if original_payload is present] + */ + @Nullable + String getOriginalPayloadFormat(); + + /** + * Returns the original payload, in a profiler-native format e.g. JFR. Optional. Default behavior + * should be to not include the original payload. If the original payload is in pprof format, it + * SHOULD not be included in this field. + */ + ByteBuffer getOriginalPayload(); +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ProfileMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ProfileMarshaler.java new file mode 100644 index 00000000000..8d04bbbf94f --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ProfileMarshaler.java @@ -0,0 +1,235 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.otlp.KeyValueMarshaler; +import io.opentelemetry.proto.profiles.v1development.internal.Profile; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.List; + +final class ProfileMarshaler extends MarshalerWithSize { + + private final ValueTypeMarshaler[] sampleTypeMarshalers; + private final SampleMarshaler[] sampleMarshalers; + private final MappingMarshaler[] mappingTableMarshalers; + private final LocationMarshaler[] locationTableMarshalers; + private final List locationIndices; + private final FunctionMarshaler[] functionTableMarshalers; + private final KeyValueMarshaler[] attributeTableMarshalers; + private final AttributeUnitMarshaler[] attributeUnitMarshalers; + private final LinkMarshaler[] linkTableMarshalers; + private final byte[][] stringTable; + private final long timeNanos; + private final long durationNanos; + private final ValueTypeMarshaler periodTypeMarshaler; + private final long period; + private final List comment; + private final int defaultSampleType; + private final byte[] profileId; + private final List attributeIndices; + private final int droppedAttributesCount; + private final byte[] originalPayloadFormatUtf8; + private final ByteBuffer originalPayload; + + static ProfileMarshaler create(ProfileData profileData) { + + ValueTypeMarshaler[] sampleTypeMarshalers = + ValueTypeMarshaler.createRepeated(profileData.getSampleTypes()); + SampleMarshaler[] sampleMarshalers = SampleMarshaler.createRepeated(profileData.getSamples()); + MappingMarshaler[] mappingMarshalers = + MappingMarshaler.createRepeated(profileData.getMappingTable()); + LocationMarshaler[] locationMarshalers = + LocationMarshaler.createRepeated(profileData.getLocationTable()); + FunctionMarshaler[] functionMarshalers = + FunctionMarshaler.createRepeated(profileData.getFunctionTable()); + KeyValueMarshaler[] attributeTableMarshalers = + KeyValueMarshaler.createRepeated(profileData.getAttributeTable()); + AttributeUnitMarshaler[] attributeUnitsMarshalers = + 
AttributeUnitMarshaler.createRepeated(profileData.getAttributeUnits()); + LinkMarshaler[] linkMarshalers = LinkMarshaler.createRepeated(profileData.getLinkTable()); + ValueTypeMarshaler periodTypeMarshaler = ValueTypeMarshaler.create(profileData.getPeriodType()); + + byte[][] convertedStrings = new byte[profileData.getStringTable().size()][]; + for (int i = 0; i < profileData.getStringTable().size(); i++) { + convertedStrings[i] = profileData.getStringTable().get(i).getBytes(StandardCharsets.UTF_8); + } + + int droppedAttributesCount = + profileData.getTotalAttributeCount() - profileData.getAttributeIndices().size(); + + return new ProfileMarshaler( + sampleTypeMarshalers, + sampleMarshalers, + mappingMarshalers, + locationMarshalers, + profileData.getLocationIndices(), + functionMarshalers, + attributeTableMarshalers, + attributeUnitsMarshalers, + linkMarshalers, + convertedStrings, + profileData.getTimeNanos(), + profileData.getDurationNanos(), + periodTypeMarshaler, + profileData.getPeriod(), + profileData.getCommentStrIndices(), + profileData.getDefaultSampleTypeStringIndex(), + profileData.getProfileIdBytes(), + profileData.getAttributeIndices(), + droppedAttributesCount, + MarshalerUtil.toBytes(profileData.getOriginalPayloadFormat()), + profileData.getOriginalPayload()); + } + + private ProfileMarshaler( + ValueTypeMarshaler[] sampleTypeMarshalers, + SampleMarshaler[] sampleMarshalers, + MappingMarshaler[] mappingTableMarshalers, + LocationMarshaler[] locationTableMarshalers, + List locationIndices, + FunctionMarshaler[] functionTableMarshalers, + KeyValueMarshaler[] attributeTableMarshalers, + AttributeUnitMarshaler[] attributeUnitMarshalers, + LinkMarshaler[] linkTableMarshalers, + byte[][] stringTableUtf8, + long timeNanos, + long durationNanos, + ValueTypeMarshaler periodTypeMarshaler, + long period, + List comment, + int defaultSampleType, + byte[] profileId, + List attributeIndices, + int droppedAttributesCount, + byte[] originalPayloadFormat, + ByteBuffer originalPayload) { + super( + calculateSize( + sampleTypeMarshalers, + sampleMarshalers, + mappingTableMarshalers, + locationTableMarshalers, + locationIndices, + functionTableMarshalers, + attributeTableMarshalers, + attributeUnitMarshalers, + linkTableMarshalers, + stringTableUtf8, + timeNanos, + durationNanos, + periodTypeMarshaler, + period, + comment, + defaultSampleType, + profileId, + attributeIndices, + droppedAttributesCount, + originalPayloadFormat, + originalPayload)); + this.sampleTypeMarshalers = sampleTypeMarshalers; + this.sampleMarshalers = sampleMarshalers; + this.mappingTableMarshalers = mappingTableMarshalers; + this.locationTableMarshalers = locationTableMarshalers; + this.locationIndices = locationIndices; + this.functionTableMarshalers = functionTableMarshalers; + this.attributeTableMarshalers = attributeTableMarshalers; + this.attributeUnitMarshalers = attributeUnitMarshalers; + this.linkTableMarshalers = linkTableMarshalers; + this.stringTable = stringTableUtf8; + this.timeNanos = timeNanos; + this.durationNanos = durationNanos; + this.periodTypeMarshaler = periodTypeMarshaler; + this.period = period; + this.comment = comment; + this.defaultSampleType = defaultSampleType; + this.profileId = profileId; + this.attributeIndices = attributeIndices; + this.droppedAttributesCount = droppedAttributesCount; + this.originalPayloadFormatUtf8 = originalPayloadFormat; + this.originalPayload = originalPayload; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + 
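+ // Default/empty field values are not written (see the size check in ProfilesRequestMarshalerTest),
+ // so this method and calculateSize below must use the same inputs to stay consistent.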
output.serializeRepeatedMessage(Profile.SAMPLE_TYPE, sampleTypeMarshalers); + output.serializeRepeatedMessage(Profile.SAMPLE, sampleMarshalers); + output.serializeRepeatedMessage(Profile.MAPPING_TABLE, mappingTableMarshalers); + output.serializeRepeatedMessage(Profile.LOCATION_TABLE, locationTableMarshalers); + output.serializeRepeatedInt32(Profile.LOCATION_INDICES, locationIndices); + output.serializeRepeatedMessage(Profile.FUNCTION_TABLE, functionTableMarshalers); + output.serializeRepeatedMessage(Profile.ATTRIBUTE_TABLE, attributeTableMarshalers); + output.serializeRepeatedMessage(Profile.ATTRIBUTE_UNITS, attributeUnitMarshalers); + output.serializeRepeatedMessage(Profile.LINK_TABLE, linkTableMarshalers); + output.serializeRepeatedString(Profile.STRING_TABLE, stringTable); + output.serializeInt64(Profile.TIME_NANOS, timeNanos); + output.serializeInt64(Profile.DURATION_NANOS, durationNanos); + output.serializeMessage(Profile.PERIOD_TYPE, periodTypeMarshaler); + output.serializeInt64(Profile.PERIOD, period); + output.serializeRepeatedInt32(Profile.COMMENT_STRINDICES, comment); + output.serializeInt32(Profile.DEFAULT_SAMPLE_TYPE_STRINDEX, defaultSampleType); + + output.serializeBytes(Profile.PROFILE_ID, profileId); + output.serializeRepeatedInt32(Profile.ATTRIBUTE_INDICES, attributeIndices); + output.serializeUInt32(Profile.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + output.serializeString(Profile.ORIGINAL_PAYLOAD_FORMAT, originalPayloadFormatUtf8); + output.serializeByteBuffer(Profile.ORIGINAL_PAYLOAD, originalPayload); + } + + private static int calculateSize( + ValueTypeMarshaler[] sampleTypeMarshalers, + SampleMarshaler[] sampleMarshalers, + MappingMarshaler[] mappingMarshalers, + LocationMarshaler[] locationMarshalers, + List locationIndices, + FunctionMarshaler[] functionMarshalers, + KeyValueMarshaler[] attributeTableMarshalers, + AttributeUnitMarshaler[] attributeUnitMarshalers, + LinkMarshaler[] linkMarshalers, + byte[][] stringTable, + long timeNanos, + long durationNanos, + ValueTypeMarshaler periodTypeMarshaler, + long period, + List comment, + int defaultSampleType, + byte[] profileId, + List attributeIndices, + int droppedAttributesCount, + byte[] originalPayloadFormat, + ByteBuffer originalPayload) { + int size; + size = 0; + size += MarshalerUtil.sizeRepeatedMessage(Profile.SAMPLE_TYPE, sampleTypeMarshalers); + size += MarshalerUtil.sizeRepeatedMessage(Profile.SAMPLE, sampleMarshalers); + size += MarshalerUtil.sizeRepeatedMessage(Profile.MAPPING_TABLE, mappingMarshalers); + size += MarshalerUtil.sizeRepeatedMessage(Profile.LOCATION_TABLE, locationMarshalers); + size += MarshalerUtil.sizeRepeatedInt32(Profile.LOCATION_INDICES, locationIndices); + size += MarshalerUtil.sizeRepeatedMessage(Profile.FUNCTION_TABLE, functionMarshalers); + size += MarshalerUtil.sizeRepeatedMessage(Profile.ATTRIBUTE_TABLE, attributeTableMarshalers); + size += MarshalerUtil.sizeRepeatedMessage(Profile.ATTRIBUTE_UNITS, attributeUnitMarshalers); + size += MarshalerUtil.sizeRepeatedMessage(Profile.LINK_TABLE, linkMarshalers); + size += MarshalerUtil.sizeRepeatedString(Profile.STRING_TABLE, stringTable); + size += MarshalerUtil.sizeInt64(Profile.TIME_NANOS, timeNanos); + size += MarshalerUtil.sizeInt64(Profile.DURATION_NANOS, durationNanos); + size += MarshalerUtil.sizeMessage(Profile.PERIOD_TYPE, periodTypeMarshaler); + size += MarshalerUtil.sizeInt64(Profile.PERIOD, period); + size += MarshalerUtil.sizeRepeatedInt32(Profile.COMMENT_STRINDICES, comment); + size += 
MarshalerUtil.sizeInt64(Profile.DEFAULT_SAMPLE_TYPE_STRINDEX, defaultSampleType); + + size += MarshalerUtil.sizeBytes(Profile.PROFILE_ID, profileId); + size += MarshalerUtil.sizeRepeatedInt32(Profile.ATTRIBUTE_INDICES, attributeIndices); + size += MarshalerUtil.sizeInt32(Profile.DROPPED_ATTRIBUTES_COUNT, droppedAttributesCount); + size += MarshalerUtil.sizeBytes(Profile.ORIGINAL_PAYLOAD_FORMAT, originalPayloadFormat); + size += MarshalerUtil.sizeByteBuffer(Profile.ORIGINAL_PAYLOAD, originalPayload); + + return size; + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ProfilesRequestMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ProfilesRequestMarshaler.java new file mode 100644 index 00000000000..3d57825899a --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ProfilesRequestMarshaler.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.ProtoFieldInfo; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.collector.profiles.v1development.internal.ExportProfilesServiceRequest; +import java.io.IOException; +import java.util.Collection; + +/** + * {@link Marshaler} to convert SDK {@link ProfileData} to OTLP ExportProfilesServiceRequest. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class ProfilesRequestMarshaler extends MarshalerWithSize { + + private static final ProtoFieldInfo RESOURCE_PROFILES = + ExportProfilesServiceRequest.RESOURCE_PROFILES; + + private final ResourceProfilesMarshaler[] resourceProfilesMarshalers; + + /** + * Returns a {@link ProfilesRequestMarshaler} that can be used to convert the provided {@link + * ProfileData} into a serialized OTLP ExportProfilesServiceRequest. + */ + public static ProfilesRequestMarshaler create(Collection profileList) { + return new ProfilesRequestMarshaler(ResourceProfilesMarshaler.create(profileList)); + } + + private ProfilesRequestMarshaler(ResourceProfilesMarshaler[] resourceProfilesMarshalers) { + super(MarshalerUtil.sizeRepeatedMessage(RESOURCE_PROFILES, resourceProfilesMarshalers)); + this.resourceProfilesMarshalers = resourceProfilesMarshalers; + } + + @Override + public void writeTo(Serializer output) throws IOException { + output.serializeRepeatedMessage(RESOURCE_PROFILES, resourceProfilesMarshalers); + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ResourceProfilesMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ResourceProfilesMarshaler.java new file mode 100644 index 00000000000..442604cead2 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ResourceProfilesMarshaler.java @@ -0,0 +1,100 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.exporter.internal.otlp.InstrumentationScopeMarshaler; +import io.opentelemetry.exporter.internal.otlp.ResourceMarshaler; +import io.opentelemetry.proto.profiles.v1development.internal.ResourceProfiles; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +final class ResourceProfilesMarshaler extends MarshalerWithSize { + + private final ResourceMarshaler resourceMarshaler; + private final byte[] schemaUrl; + private final InstrumentationScopeProfilesMarshaler[] instrumentationScopeProfilesMarshalers; + + /** Returns Marshalers of ResourceProfiles created by grouping the provided Profiles. 
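+ * Profiles are grouped first by {@link Resource} and then by {@link InstrumentationScopeInfo},
+ * mirroring the ResourceProfiles / ScopeProfiles nesting of the proto.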
*/ + @SuppressWarnings("AvoidObjectArrays") + static ResourceProfilesMarshaler[] create(Collection profiles) { + Map>> resourceAndScopeMap = + groupByResourceAndScope(profiles); + + ResourceProfilesMarshaler[] resourceProfilesMarshalers = + new ResourceProfilesMarshaler[resourceAndScopeMap.size()]; + int posResource = 0; + for (Map.Entry>> entry : + resourceAndScopeMap.entrySet()) { + InstrumentationScopeProfilesMarshaler[] instrumentationLibrarySpansMarshalers = + new InstrumentationScopeProfilesMarshaler[entry.getValue().size()]; + int posInstrumentation = 0; + + for (Map.Entry> entryIs : + entry.getValue().entrySet()) { + instrumentationLibrarySpansMarshalers[posInstrumentation++] = + new InstrumentationScopeProfilesMarshaler( + InstrumentationScopeMarshaler.create(entryIs.getKey()), + MarshalerUtil.toBytes(entryIs.getKey().getSchemaUrl()), + entryIs.getValue()); + } + + resourceProfilesMarshalers[posResource++] = + new ResourceProfilesMarshaler( + ResourceMarshaler.create(entry.getKey()), + MarshalerUtil.toBytes(entry.getKey().getSchemaUrl()), + instrumentationLibrarySpansMarshalers); + } + + return resourceProfilesMarshalers; + } + + private ResourceProfilesMarshaler( + ResourceMarshaler resourceMarshaler, + byte[] schemaUrl, + InstrumentationScopeProfilesMarshaler[] instrumentationScopeProfilesMarshalers) { + super(calculateSize(resourceMarshaler, schemaUrl, instrumentationScopeProfilesMarshalers)); + this.resourceMarshaler = resourceMarshaler; + this.schemaUrl = schemaUrl; + this.instrumentationScopeProfilesMarshalers = instrumentationScopeProfilesMarshalers; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + output.serializeMessage(ResourceProfiles.RESOURCE, resourceMarshaler); + output.serializeRepeatedMessage( + ResourceProfiles.SCOPE_PROFILES, instrumentationScopeProfilesMarshalers); + output.serializeString(ResourceProfiles.SCHEMA_URL, schemaUrl); + } + + private static int calculateSize( + ResourceMarshaler resourceMarshaler, + byte[] schemaUrl, + InstrumentationScopeProfilesMarshaler[] instrumentationScopeProfilesMarshalers) { + int size = 0; + size += MarshalerUtil.sizeMessage(ResourceProfiles.RESOURCE, resourceMarshaler); + size += + MarshalerUtil.sizeRepeatedMessage( + ResourceProfiles.SCOPE_PROFILES, instrumentationScopeProfilesMarshalers); + size += MarshalerUtil.sizeBytes(ResourceProfiles.SCHEMA_URL, schemaUrl); + return size; + } + + private static Map>> + groupByResourceAndScope(Collection profiles) { + return MarshalerUtil.groupByResourceAndScope( + profiles, + ProfileData::getResource, + ProfileData::getInstrumentationScopeInfo, + ProfileMarshaler::create); + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/SampleData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/SampleData.java new file mode 100644 index 00000000000..ef565b53f86 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/SampleData.java @@ -0,0 +1,49 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import java.util.List; +import javax.annotation.concurrent.Immutable; + +/** + * Each Sample records values encountered in some program context. The program context is typically + * a stack trace, perhaps augmented with auxiliary information like the thread-id, some indicator of + * a higher level request being handled etc. 
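+ * <p>A sketch with hypothetical values: a sample selecting two consecutive locations starting at
+ * index 1, carrying values {@code [3, 4]} whose meaning is defined by the profile's sample types.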
+ * + * @see "profiles.proto::Sample" + */ +@Immutable +public interface SampleData { + + /** + * locationsStartIndex along with locationsLength refers to a slice of locations in + * Profile.location. Supersedes locationIndices. + */ + int getLocationsStartIndex(); + + /** + * locationsLength along with locationsStartIndex refers to a slice of locations in + * Profile.location. locationIndices. + */ + int getLocationsLength(); + + /** + * The type and unit of each value is defined by the corresponding entry in Profile.sample_type. + */ + List getValues(); + + /** References to attributes in Profile.attribute_table. */ + List getAttributeIndices(); + + /** Reference to link in Profile.link_table. */ + Integer getLinkIndex(); + + /** + * Timestamps associated with Sample represented in ms. These timestamps are expected to fall + * within the Profile's time range. + */ + List getTimestamps(); +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/SampleMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/SampleMarshaler.java new file mode 100644 index 00000000000..5a1517ccf06 --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/SampleMarshaler.java @@ -0,0 +1,103 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.profiles.v1development.internal.Sample; +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; +import javax.annotation.Nullable; + +final class SampleMarshaler extends MarshalerWithSize { + + private static final SampleMarshaler[] EMPTY_REPEATED = new SampleMarshaler[0]; + + private final int locationsStartIndex; + private final int locationsLength; + private final List values; + private final List attributeIndices; + @Nullable private final Integer linkIndex; + private final List timestamps; + + static SampleMarshaler create(SampleData sampleData) { + + return new SampleMarshaler( + sampleData.getLocationsStartIndex(), + sampleData.getLocationsLength(), + sampleData.getValues(), + sampleData.getAttributeIndices(), + sampleData.getLinkIndex(), + sampleData.getTimestamps()); + } + + static SampleMarshaler[] createRepeated(List items) { + if (items.isEmpty()) { + return EMPTY_REPEATED; + } + + SampleMarshaler[] sampleMarshalers = new SampleMarshaler[items.size()]; + items.forEach( + item -> + new Consumer() { + int index = 0; + + @Override + public void accept(SampleData sampleData) { + sampleMarshalers[index++] = SampleMarshaler.create(sampleData); + } + }); + return sampleMarshalers; + } + + private SampleMarshaler( + int locationsStartIndex, + int locationsLength, + List values, + List attributeIndices, + @Nullable Integer linkIndex, + List timestamps) { + super( + calculateSize( + locationsStartIndex, locationsLength, values, attributeIndices, linkIndex, timestamps)); + this.locationsStartIndex = locationsStartIndex; + this.locationsLength = locationsLength; + this.values = values; + this.attributeIndices = attributeIndices; + this.linkIndex = linkIndex; + this.timestamps = timestamps; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + 
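+ // locations_start_index/locations_length select a slice of the profile's locations; the
+ // nullable link_index is only written when present (serializeInt32Optional).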
output.serializeInt32(Sample.LOCATIONS_START_INDEX, locationsStartIndex); + output.serializeInt32(Sample.LOCATIONS_LENGTH, locationsLength); + output.serializeRepeatedInt64(Sample.VALUE, values); + output.serializeRepeatedInt32(Sample.ATTRIBUTE_INDICES, attributeIndices); + output.serializeInt32Optional(Sample.LINK_INDEX, linkIndex); + output.serializeRepeatedUInt64(Sample.TIMESTAMPS_UNIX_NANO, timestamps); + } + + private static int calculateSize( + int locationsStartIndex, + int locationsLength, + List values, + List attributeIndices, + @Nullable Integer linkIndex, + List timestamps) { + int size; + size = 0; + size += MarshalerUtil.sizeInt32(Sample.LOCATIONS_START_INDEX, locationsStartIndex); + size += MarshalerUtil.sizeInt32(Sample.LOCATIONS_LENGTH, locationsLength); + size += MarshalerUtil.sizeRepeatedInt64(Sample.VALUE, values); + size += MarshalerUtil.sizeRepeatedInt32(Sample.ATTRIBUTE_INDICES, attributeIndices); + size += MarshalerUtil.sizeInt32Optional(Sample.LINK_INDEX, linkIndex); + size += MarshalerUtil.sizeRepeatedUInt64(Sample.TIMESTAMPS_UNIX_NANO, timestamps); + return size; + } +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ValueTypeData.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ValueTypeData.java new file mode 100644 index 00000000000..10aac94b71a --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ValueTypeData.java @@ -0,0 +1,27 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import javax.annotation.Nullable; +import javax.annotation.concurrent.Immutable; + +/** + * ValueType describes the type and units of a value, with an optional aggregation temporality. + * + * @see "profiles.proto::ValueType" + */ +@Immutable +public interface ValueTypeData { + + /** Index into string table. */ + int getTypeStringIndex(); + + /** Index into string table. 
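+ * For example, an index pointing at a {@code "nanoseconds"} entry (hypothetical value).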
*/ + int getUnitStringIndex(); + + @Nullable + AggregationTemporality getAggregationTemporality(); +} diff --git a/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ValueTypeMarshaler.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ValueTypeMarshaler.java new file mode 100644 index 00000000000..a52621b119c --- /dev/null +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/ValueTypeMarshaler.java @@ -0,0 +1,88 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import io.opentelemetry.exporter.internal.marshal.MarshalerUtil; +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.ProtoEnumInfo; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.proto.profiles.v1development.internal.AggregationTemporality; +import io.opentelemetry.proto.profiles.v1development.internal.ValueType; +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; + +final class ValueTypeMarshaler extends MarshalerWithSize { + + private static final ValueTypeMarshaler[] EMPTY_REPEATED = new ValueTypeMarshaler[0]; + + private final int typeStringIndex; + private final int unitStringIndex; + private final ProtoEnumInfo aggregationTemporality; + + static ValueTypeMarshaler create(ValueTypeData valueTypeData) { + ProtoEnumInfo aggregationTemporality = + AggregationTemporality.AGGREGATION_TEMPORALITY_UNSPECIFIED; + if (valueTypeData.getAggregationTemporality() != null) { + switch (valueTypeData.getAggregationTemporality()) { + case DELTA: + aggregationTemporality = AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA; + break; + case CUMULATIVE: + aggregationTemporality = AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE; + break; + } + } + return new ValueTypeMarshaler( + valueTypeData.getTypeStringIndex(), + valueTypeData.getUnitStringIndex(), + aggregationTemporality); + } + + static ValueTypeMarshaler[] createRepeated(List items) { + if (items.isEmpty()) { + return EMPTY_REPEATED; + } + + ValueTypeMarshaler[] valueTypeMarshalers = new ValueTypeMarshaler[items.size()]; + items.forEach( + item -> + new Consumer() { + int index = 0; + + @Override + public void accept(ValueTypeData valueTypeData) { + valueTypeMarshalers[index++] = ValueTypeMarshaler.create(valueTypeData); + } + }); + return valueTypeMarshalers; + } + + private ValueTypeMarshaler( + int typeStringIndex, int unitStringIndex, ProtoEnumInfo aggregationTemporality) { + super(calculateSize(typeStringIndex, unitStringIndex, aggregationTemporality)); + this.typeStringIndex = typeStringIndex; + this.unitStringIndex = unitStringIndex; + this.aggregationTemporality = aggregationTemporality; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + output.serializeInt64(ValueType.TYPE_STRINDEX, typeStringIndex); + output.serializeInt64(ValueType.UNIT_STRINDEX, unitStringIndex); + output.serializeEnum(ValueType.AGGREGATION_TEMPORALITY, aggregationTemporality); + } + + private static int calculateSize( + int typeStringIndex, int unitStringIndex, ProtoEnumInfo aggregationTemporality) { + int size; + size = 0; + size += MarshalerUtil.sizeInt32(ValueType.TYPE_STRINDEX, typeStringIndex); + size += MarshalerUtil.sizeInt32(ValueType.UNIT_STRINDEX, unitStringIndex); + size += 
MarshalerUtil.sizeEnum(ValueType.AGGREGATION_TEMPORALITY, aggregationTemporality); + return size; + } +} diff --git a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/auth/package-info.java b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/package-info.java similarity index 62% rename from exporters/common/src/main/java/io/opentelemetry/exporter/internal/auth/package-info.java rename to exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/package-info.java index 1d79565d574..ec0107701b5 100644 --- a/exporters/common/src/main/java/io/opentelemetry/exporter/internal/auth/package-info.java +++ b/exporters/otlp/profiles/src/main/java/io/opentelemetry/exporter/otlp/profiles/package-info.java @@ -3,8 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ -/** Utilities for exporter authentication. */ +/** The data format to model profiles for export. */ @ParametersAreNonnullByDefault -package io.opentelemetry.exporter.internal.auth; +package io.opentelemetry.exporter.otlp.profiles; import javax.annotation.ParametersAreNonnullByDefault; diff --git a/exporters/otlp/profiles/src/test/java/io/opentelemetry/exporter/otlp/profiles/ProfilesRequestMarshalerTest.java b/exporters/otlp/profiles/src/test/java/io/opentelemetry/exporter/otlp/profiles/ProfilesRequestMarshalerTest.java new file mode 100644 index 00000000000..3ed1d9159d2 --- /dev/null +++ b/exporters/otlp/profiles/src/test/java/io/opentelemetry/exporter/otlp/profiles/ProfilesRequestMarshalerTest.java @@ -0,0 +1,303 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.profiles; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.google.protobuf.ByteString; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.Message; +import com.google.protobuf.util.JsonFormat; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.otlp.internal.data.ImmutableAttributeUnitData; +import io.opentelemetry.exporter.otlp.internal.data.ImmutableFunctionData; +import io.opentelemetry.exporter.otlp.internal.data.ImmutableLineData; +import io.opentelemetry.exporter.otlp.internal.data.ImmutableLinkData; +import io.opentelemetry.exporter.otlp.internal.data.ImmutableLocationData; +import io.opentelemetry.exporter.otlp.internal.data.ImmutableMappingData; +import io.opentelemetry.exporter.otlp.internal.data.ImmutableProfileData; +import io.opentelemetry.exporter.otlp.internal.data.ImmutableSampleData; +import io.opentelemetry.exporter.otlp.internal.data.ImmutableValueTypeData; +import io.opentelemetry.proto.common.v1.InstrumentationScope; +import io.opentelemetry.proto.profiles.v1development.AttributeUnit; +import io.opentelemetry.proto.profiles.v1development.Function; +import io.opentelemetry.proto.profiles.v1development.Line; +import io.opentelemetry.proto.profiles.v1development.Link; +import io.opentelemetry.proto.profiles.v1development.Location; +import io.opentelemetry.proto.profiles.v1development.Mapping; +import io.opentelemetry.proto.profiles.v1development.Profile; +import io.opentelemetry.proto.profiles.v1development.ResourceProfiles; +import io.opentelemetry.proto.profiles.v1development.Sample; +import io.opentelemetry.proto.profiles.v1development.ScopeProfiles; +import io.opentelemetry.proto.profiles.v1development.ValueType; +import 
io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.resources.Resource; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import org.junit.jupiter.api.Test; + +public class ProfilesRequestMarshalerTest { + @Test + void compareAttributeUnitMarshaling() { + AttributeUnitData input = ImmutableAttributeUnitData.create(1, 2); + AttributeUnit builderResult = + AttributeUnit.newBuilder().setAttributeKeyStrindex(1).setUnitStrindex(2).build(); + + AttributeUnit roundTripResult = + parse(AttributeUnit.getDefaultInstance(), AttributeUnitMarshaler.create(input)); + assertThat(roundTripResult).isEqualTo(builderResult); + } + + @Test + void compareFunctionMarshaling() { + FunctionData input = ImmutableFunctionData.create(1, 2, 3, 4); + Function builderResult = + Function.newBuilder() + .setNameStrindex(1) + .setSystemNameStrindex(2) + .setFilenameStrindex(3) + .setStartLine(4) + .build(); + + Function roundTripResult = + parse(Function.getDefaultInstance(), FunctionMarshaler.create(input)); + assertThat(roundTripResult).isEqualTo(builderResult); + } + + @Test + void compareLineMarshaling() { + LineData input = ImmutableLineData.create(1, 2, 3); + Line builderResult = Line.newBuilder().setFunctionIndex(1).setLine(2).setColumn(3).build(); + + Line roundTripResult = parse(Line.getDefaultInstance(), LineMarshaler.create(input)); + assertThat(roundTripResult).isEqualTo(builderResult); + } + + @Test + void compareLinkMarshaling() { + String traceId = "0123456789abcdef0123456789abcdef"; + String spanId = "fedcba9876543210"; + LinkData input = ImmutableLinkData.create(traceId, spanId); + Link builderResult = + Link.newBuilder() + .setTraceId(ByteString.fromHex(traceId)) + .setSpanId(ByteString.fromHex(spanId)) + .build(); + + Link roundTripResult = parse(Link.getDefaultInstance(), LinkMarshaler.create(input)); + assertThat(roundTripResult).isEqualTo(builderResult); + } + + @Test + void compareLocationMarshaling() { + LocationData input = + ImmutableLocationData.create(1, 2, Collections.emptyList(), true, listOf(4, 5)); + Location builderResult = + Location.newBuilder() + .setMappingIndex(1) + .setAddress(2) + .setIsFolded(true) + .addAllAttributeIndices(listOf(4, 5)) + .build(); + + Location roundTripResult = + parse(Location.getDefaultInstance(), LocationMarshaler.create(input)); + assertThat(roundTripResult).isEqualTo(builderResult); + } + + @Test + void compareMappingMarshaling() { + MappingData input = + ImmutableMappingData.create(1, 2, 3, 4, listOf(5, 6), true, true, true, true); + Mapping builderResult = + Mapping.newBuilder() + .setMemoryStart(1) + .setMemoryLimit(2) + .setFileOffset(3) + .setFilenameStrindex(4) + .addAllAttributeIndices(listOf(5, 6)) + .setHasFunctions(true) + .setHasFilenames(true) + .setHasLineNumbers(true) + .setHasInlineFrames(true) + .build(); + + Mapping roundTripResult = parse(Mapping.getDefaultInstance(), MappingMarshaler.create(input)); + assertThat(roundTripResult).isEqualTo(builderResult); + } + + @Test + void compareResourceProfilesMarshaling() { + + String profileId = "0123456789abcdef0123456789abcdef"; + ProfileData profileContainerData = + ImmutableProfileData.create( + Resource.create(Attributes.empty()), + InstrumentationScopeInfo.create("testscope"), + Collections.emptyList(), + 
Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + listOf(1, 2), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + 5L, + 6L, + ImmutableValueTypeData.create(1, 2, AggregationTemporality.CUMULATIVE), + 7L, + listOf(8, 9), + 0, + profileId, + Collections.emptyList(), + 3, + "format", + ByteBuffer.wrap(new byte[] {4, 5})); + + Collection input = new ArrayList<>(); + input.add(profileContainerData); + + Profile profileContainer = + Profile.newBuilder() + .setProfileId(ByteString.fromHex(profileId)) + .setDroppedAttributesCount(3) + .setOriginalPayloadFormat("format") + .setOriginalPayload(ByteString.copyFrom(new byte[] {4, 5})) + .addAllLocationIndices(listOf(1, 2)) + .setTimeNanos(5) + .setDurationNanos(6) + .setPeriod(7) + .setPeriodType( + ValueType.newBuilder() + .setTypeStrindex(1) + .setUnitStrindex(2) + .setAggregationTemporality( + io.opentelemetry.proto.profiles.v1development.AggregationTemporality + .AGGREGATION_TEMPORALITY_CUMULATIVE) + .build()) + .addAllCommentStrindices(listOf(8, 9)) + .build(); + + ResourceProfiles builderResult = + ResourceProfiles.newBuilder() + .setResource(io.opentelemetry.proto.resource.v1.Resource.newBuilder().build()) + .addScopeProfiles( + ScopeProfiles.newBuilder() + .setScope(InstrumentationScope.newBuilder().setName("testscope").build()) + .addProfiles(profileContainer) + .build()) + .build(); + + ResourceProfilesMarshaler[] marshalers = ResourceProfilesMarshaler.create(input); + assertThat(marshalers.length).isEqualTo(1); + ResourceProfiles roundTripResult = parse(ResourceProfiles.getDefaultInstance(), marshalers[0]); + assertThat(roundTripResult).isEqualTo(builderResult); + } + + @Test + void compareSampleMarshaling() { + SampleData input = + ImmutableSampleData.create(1, 2, listOf(3L, 4L), listOf(5, 6), 7, listOf(8L, 9L)); + Sample builderResult = + Sample.newBuilder() + .setLocationsStartIndex(1) + .setLocationsLength(2) + .addAllValue(listOf(3L, 4L)) + .addAllAttributeIndices(listOf(5, 6)) + .setLinkIndex(7) + .addAllTimestampsUnixNano(listOf(8L, 9L)) + .build(); + + Sample roundTripResult = parse(Sample.getDefaultInstance(), SampleMarshaler.create(input)); + assertThat(roundTripResult).isEqualTo(builderResult); + } + + @Test + void compareValueTypeMarshaling() { + ValueTypeData input = ImmutableValueTypeData.create(1, 2, AggregationTemporality.CUMULATIVE); + ValueType builderResult = + ValueType.newBuilder() + .setTypeStrindex(1) + .setUnitStrindex(2) + .setAggregationTemporality( + io.opentelemetry.proto.profiles.v1development.AggregationTemporality + .AGGREGATION_TEMPORALITY_CUMULATIVE) + .build(); + + ValueType roundTripResult = + parse(ValueType.getDefaultInstance(), ValueTypeMarshaler.create(input)); + assertThat(roundTripResult).isEqualTo(builderResult); + } + + private static List listOf(T a, T b) { + ArrayList list = new ArrayList<>(); + list.add(a); + list.add(b); + return Collections.unmodifiableList(list); + } + + @SuppressWarnings("unchecked") + private static T parse(T prototype, Marshaler marshaler) { + byte[] serialized = toByteArray(marshaler); + T result; + try { + result = (T) prototype.newBuilderForType().mergeFrom(serialized).build(); + } catch (InvalidProtocolBufferException e) { + throw new UncheckedIOException(e); + } + // Our marshaler should produce the exact same length of serialized output (for example, field + // default values are not outputted), so we check that here. 
The output itself may have slightly + // different ordering, mostly due to the way we don't output oneof values in field order all the + // tieme. If the lengths are equal and the resulting protos are equal, the marshaling is + // guaranteed to be valid. + assertThat(result.getSerializedSize()).isEqualTo(serialized.length); + + // We don't compare JSON strings due to some differences (particularly serializing enums as + // numbers instead of names). This may improve in the future but what matters is what we produce + // can be parsed. + String json = toJson(marshaler); + Message.Builder builder = prototype.newBuilderForType(); + try { + JsonFormat.parser().merge(json, builder); + } catch (InvalidProtocolBufferException e) { + throw new UncheckedIOException(e); + } + + assertThat(builder.build()).isEqualTo(result); + + return result; + } + + private static byte[] toByteArray(Marshaler marshaler) { + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + try { + marshaler.writeBinaryTo(bos); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + return bos.toByteArray(); + } + + private static String toJson(Marshaler marshaler) { + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + try { + marshaler.writeJsonTo(bos); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + return new String(bos.toByteArray(), StandardCharsets.UTF_8); + } +} diff --git a/exporters/otlp/testing-internal/build.gradle.kts b/exporters/otlp/testing-internal/build.gradle.kts index 2e491e84c13..2c7c3cc9bd8 100644 --- a/exporters/otlp/testing-internal/build.gradle.kts +++ b/exporters/otlp/testing-internal/build.gradle.kts @@ -31,6 +31,7 @@ dependencies { implementation("com.linecorp.armeria:armeria-junit5") implementation("io.github.netmikey.logunit:logunit-jul") implementation("org.assertj:assertj-core") + implementation("org.mock-server:mockserver-netty") } // Skip OWASP dependencyCheck task on test module diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/AbstractGrpcTelemetryExporterTest.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/AbstractGrpcTelemetryExporterTest.java index 5c2c6b665a7..d8caa8ad22d 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/AbstractGrpcTelemetryExporterTest.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/AbstractGrpcTelemetryExporterTest.java @@ -5,6 +5,7 @@ package io.opentelemetry.exporter.otlp.testing.internal; +import static org.assertj.core.api.Assertions.as; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -15,6 +16,7 @@ import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Message; import com.linecorp.armeria.common.HttpRequest; +import com.linecorp.armeria.common.TlsKeyPair; import com.linecorp.armeria.common.grpc.protocol.ArmeriaStatusException; import com.linecorp.armeria.server.ServerBuilder; import com.linecorp.armeria.server.ServiceRequestContext; @@ -23,9 +25,15 @@ import com.linecorp.armeria.testing.junit5.server.SelfSignedCertificateExtension; import com.linecorp.armeria.testing.junit5.server.ServerExtension; import io.github.netmikey.logunit.api.LogCapturer; +import io.grpc.ManagedChannel; +import 
io.opentelemetry.exporter.internal.FailedExportException; import io.opentelemetry.exporter.internal.TlsUtil; +import io.opentelemetry.exporter.internal.compression.GzipCompressor; import io.opentelemetry.exporter.internal.grpc.GrpcExporter; +import io.opentelemetry.exporter.internal.grpc.GrpcResponse; +import io.opentelemetry.exporter.internal.grpc.MarshalerServiceStub; import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.otlp.testing.internal.compressor.Base64Compressor; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest; import io.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse; @@ -39,6 +47,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.lang.reflect.Field; +import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.security.cert.CertificateEncodingException; @@ -46,10 +55,12 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Enumeration; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; @@ -61,6 +72,8 @@ import javax.net.ssl.TrustManager; import javax.net.ssl.X509KeyManager; import javax.net.ssl.X509TrustManager; +import org.assertj.core.api.Assertions; +import org.assertj.core.api.InstanceOfAssertFactories; import org.assertj.core.api.iterable.ThrowingExtractor; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -128,7 +141,7 @@ protected void configure(ServerBuilder sb) { sb.http(0); sb.https(0); - sb.tls(certificate.certificateFile(), certificate.privateKeyFile()); + sb.tls(TlsKeyPair.of(certificate.privateKey(), certificate.certificate())); sb.tlsCustomizer(ssl -> ssl.trustManager(clientCertificate.certificate())); sb.decorator(LoggingService.newDecorator()); } @@ -181,7 +194,17 @@ protected AbstractGrpcTelemetryExporterTest(String type, U resourceTelemetryInst @BeforeAll void setUp() { - exporter = exporterBuilder().setEndpoint(server.httpUri().toString()).build(); + exporter = + exporterBuilder() + .setEndpoint(server.httpUri().toString()) + // We don't validate backoff time itself in these tests, just that retries + // occur. Keep the tests fast by using minimal backoff. + .setRetryPolicy( + RetryPolicy.getDefault().toBuilder() + .setMaxAttempts(2) + .setInitialBackoff(Duration.ofMillis(1)) + .build()) + .build(); // Sanity check that TLS files are in PEM format. 
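+ // (PEM-encoded files begin with a "-----BEGIN ..." text header.)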
assertThat(certificate.certificateFile()) @@ -215,6 +238,23 @@ void reset() { httpRequests.clear(); } + @Test + void minimalChannel() { + // Test that UpstreamGrpcSender uses minimal fallback managed channel, so skip for + // OkHttpGrpcSender + assumeThat(exporter.unwrap()) + .extracting("delegate.grpcSender") + .matches(sender -> sender.getClass().getSimpleName().equals("UpstreamGrpcSender")); + // When no channel is explicitly set, should fall back to a minimally configured managed channel + TelemetryExporter exporter = exporterBuilder().build(); + assertThat(exporter.shutdown().join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); + assertThat(exporter.unwrap()) + .extracting( + "delegate.grpcSender.stub", + as(InstanceOfAssertFactories.type(MarshalerServiceStub.class))) + .satisfies(stub -> assertThat(((ManagedChannel) stub.getChannel()).isShutdown()).isTrue()); + } + @Test void export() { List telemetry = Collections.singletonList(generateFakeTelemetry()); @@ -251,9 +291,7 @@ void compressionWithNone() { assumeThat(exporter.unwrap()) .extracting("delegate.grpcSender") .matches(sender -> sender.getClass().getSimpleName().equals("OkHttpGrpcSender")); - assertThat(exporter.unwrap()) - .extracting("delegate.grpcSender.compressionEnabled") - .isEqualTo(false); + assertThat(exporter.unwrap()).extracting("delegate.grpcSender.compressor").isNull(); } finally { exporter.shutdown(); } @@ -269,8 +307,25 @@ void compressionWithGzip() { .extracting("delegate.grpcSender") .matches(sender -> sender.getClass().getSimpleName().equals("OkHttpGrpcSender")); assertThat(exporter.unwrap()) - .extracting("delegate.grpcSender.compressionEnabled") - .isEqualTo(true); + .extracting("delegate.grpcSender.compressor") + .isEqualTo(GzipCompressor.getInstance()); + } finally { + exporter.shutdown(); + } + } + + @Test + void compressionWithSpiCompressor() { + TelemetryExporter exporter = + exporterBuilder().setEndpoint(server.httpUri().toString()).setCompression("base64").build(); + try { + // UpstreamGrpcSender doesn't support compression, so we skip the assertion + assumeThat(exporter.unwrap()) + .extracting("delegate.grpcSender") + .matches(sender -> sender.getClass().getSimpleName().equals("OkHttpGrpcSender")); + assertThat(exporter.unwrap()) + .extracting("delegate.grpcSender.compressor") + .isEqualTo(Base64Compressor.getInstance()); } finally { exporter.shutdown(); } @@ -291,18 +346,31 @@ void authorityWithAuth() { @Test void withHeaders() { + AtomicInteger count = new AtomicInteger(); TelemetryExporter exporter = exporterBuilder() .setEndpoint(server.httpUri().toString()) - .addHeader("key", "value") + .addHeader("key1", "value1") + .setHeaders(() -> Collections.singletonMap("key2", "value" + count.incrementAndGet())) .build(); try { + // Export twice to ensure header supplier gets invoked twice CompletableResultCode result = exporter.export(Collections.singletonList(generateFakeTelemetry())); assertThat(result.join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); + result = exporter.export(Collections.singletonList(generateFakeTelemetry())); + assertThat(result.join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); + assertThat(httpRequests) - .singleElement() - .satisfies(req -> assertThat(req.headers().get("key")).isEqualTo("value")); + .satisfiesExactly( + req -> { + assertThat(req.headers().get("key1")).isEqualTo("value1"); + assertThat(req.headers().get("key2")).isEqualTo("value" + (count.get() - 1)); + }, + req -> { + assertThat(req.headers().get("key1")).isEqualTo("value1"); + 
assertThat(req.headers().get("key2")).isEqualTo("value" + count.get()); + }); } finally { exporter.shutdown(); } @@ -405,6 +473,46 @@ public Stream provideArguments(ExtensionContext context) th } } + @Test + @SuppressLogger(GrpcExporter.class) + void connectTimeout() { + // UpstreamGrpcSender doesn't support connectTimeout, so we skip the test + assumeThat(exporter.unwrap()) + .extracting("delegate.grpcSender") + .matches(sender -> sender.getClass().getSimpleName().equals("OkHttpGrpcSender")); + + TelemetryExporter exporter = + exporterBuilder() + // Connecting to a non-routable IP address to trigger connection error + .setEndpoint("http://10.255.255.1") + .setConnectTimeout(Duration.ofMillis(1)) + .setRetryPolicy(null) + .build(); + try { + long startTimeMillis = System.currentTimeMillis(); + CompletableResultCode result = + exporter.export(Collections.singletonList(generateFakeTelemetry())); + + assertThat(result.join(10, TimeUnit.SECONDS).isSuccess()).isFalse(); + + assertThat(result.getFailureThrowable()) + .asInstanceOf( + InstanceOfAssertFactories.throwable(FailedExportException.GrpcExportException.class)) + .returns(false, Assertions.from(FailedExportException::failedWithResponse)) + .satisfies( + ex -> { + assertThat(ex.getResponse()).isNull(); + assertThat(ex.getCause()).isNotNull(); + }); + + // Assert that the export request fails well before the default connect timeout of 10s + assertThat(System.currentTimeMillis() - startTimeMillis) + .isLessThan(TimeUnit.SECONDS.toMillis(1)); + } finally { + exporter.shutdown(); + } + } + @Test void deadlineSetPerExport() throws InterruptedException { TelemetryExporter exporter = @@ -451,122 +559,102 @@ void doubleShutdown() { @Test @SuppressLogger(GrpcExporter.class) void error() { - addGrpcError(13, null); - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); - LoggingEvent log = - logs.assertContains( - "Failed to export " - + type - + "s. Server responded with gRPC status code 13. Error message:"); - assertThat(log.getLevel()).isEqualTo(Level.WARN); - } + int statusCode = 13; + addGrpcError(statusCode, null); - @Test - @SuppressLogger(GrpcExporter.class) - void errorWithMessage() { - addGrpcError(8, "out of quota"); - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); - LoggingEvent log = - logs.assertContains( - "Failed to export " - + type - + "s. Server responded with gRPC status code 8. Error message: out of quota"); - assertThat(log.getLevel()).isEqualTo(Level.WARN); - } + TelemetryExporter exporter = nonRetryingExporter(); - @Test - @SuppressLogger(GrpcExporter.class) - void errorWithEscapedMessage() { - addGrpcError(5, "クマ🐻"); - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); - LoggingEvent log = - logs.assertContains( - "Failed to export " - + type - + "s. Server responded with gRPC status code 5. 
Error message: クマ🐻"); - assertThat(log.getLevel()).isEqualTo(Level.WARN); + try { + CompletableResultCode result = + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS); + + assertThat(result.isSuccess()).isFalse(); + + assertThat(result.getFailureThrowable()) + .asInstanceOf( + InstanceOfAssertFactories.throwable(FailedExportException.GrpcExportException.class)) + .returns(true, Assertions.from(FailedExportException::failedWithResponse)) + .satisfies( + ex -> { + assertThat(ex.getResponse()) + .isNotNull() + .extracting(GrpcResponse::grpcStatusValue) + .isEqualTo(statusCode); + + assertThat(ex.getCause()).isNull(); + }); + + LoggingEvent log = + logs.assertContains( + "Failed to export " + + type + + "s. Server responded with gRPC status code 13. Error message:"); + assertThat(log.getLevel()).isEqualTo(Level.WARN); + } finally { + exporter.shutdown(); + } } @Test @SuppressLogger(GrpcExporter.class) - void testExport_Unavailable() { - addGrpcError(14, null); - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); - LoggingEvent log = - logs.assertContains( - "Failed to export " - + type - + "s. Server is UNAVAILABLE. " - + "Make sure your collector is running and reachable from this network."); - assertThat(log.getLevel()).isEqualTo(Level.ERROR); + void errorWithUnknownError() { + addGrpcError(2, null); + + TelemetryExporter exporter = nonRetryingExporter(); + + try { + assertThat( + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS) + .getFailureThrowable()) + .asInstanceOf( + InstanceOfAssertFactories.throwable(FailedExportException.GrpcExportException.class)) + .returns(true, Assertions.from(FailedExportException::failedWithResponse)) + .satisfies( + ex -> { + assertThat(ex.getResponse()).isNotNull(); + + assertThat(ex.getCause()).isNull(); + }); + } finally { + exporter.shutdown(); + } } @Test @SuppressLogger(GrpcExporter.class) - void testExport_Unimplemented() { - addGrpcError(12, "UNIMPLEMENTED"); - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); - String envVar; - switch (type) { - case "span": - envVar = "OTEL_TRACES_EXPORTER"; - break; - case "metric": - envVar = "OTEL_METRICS_EXPORTER"; - break; - case "log": - envVar = "OTEL_LOGS_EXPORTER"; - break; - default: - throw new AssertionError(); + void errorWithMessage() { + addGrpcError(8, "out of quota"); + + TelemetryExporter exporter = nonRetryingExporter(); + + try { + assertThat( + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS) + .isSuccess()) + .isFalse(); + LoggingEvent log = + logs.assertContains( + "Failed to export " + + type + + "s. Server responded with gRPC status code 8. Error message: out of quota"); + assertThat(log.getLevel()).isEqualTo(Level.WARN); + } finally { + exporter.shutdown(); } - LoggingEvent log = - logs.assertContains( - "Failed to export " - + type - + "s. Server responded with UNIMPLEMENTED. " - + "This usually means that your collector is not configured with an otlp " - + "receiver in the \"pipelines\" section of the configuration. " - + "If export is not desired and you are using OpenTelemetry autoconfiguration or the javaagent, " - + "disable export by setting " - + envVar - + "=none. 
" - + "Full error message: UNIMPLEMENTED"); - assertThat(log.getLevel()).isEqualTo(Level.ERROR); } - @ParameterizedTest - @ValueSource(ints = {1, 4, 8, 10, 11, 14, 15}) + @Test @SuppressLogger(GrpcExporter.class) - void retryableError(int code) { - addGrpcError(code, null); + void errorWithEscapedMessage() { + addGrpcError(5, "クマ🐻"); - TelemetryExporter exporter = retryingExporter(); + TelemetryExporter exporter = nonRetryingExporter(); try { assertThat( @@ -574,21 +662,24 @@ void retryableError(int code) { .export(Collections.singletonList(generateFakeTelemetry())) .join(10, TimeUnit.SECONDS) .isSuccess()) - .isTrue(); + .isFalse(); + LoggingEvent log = + logs.assertContains( + "Failed to export " + + type + + "s. Server responded with gRPC status code 5. Error message: クマ🐻"); + assertThat(log.getLevel()).isEqualTo(Level.WARN); } finally { exporter.shutdown(); } - - assertThat(attempts).hasValue(2); } @Test @SuppressLogger(GrpcExporter.class) - void retryableError_tooManyAttempts() { - addGrpcError(1, null); - addGrpcError(1, null); + void testExport_Unavailable() { + addGrpcError(14, null); - TelemetryExporter exporter = retryingExporter(); + TelemetryExporter exporter = nonRetryingExporter(); try { assertThat( @@ -597,20 +688,24 @@ void retryableError_tooManyAttempts() { .join(10, TimeUnit.SECONDS) .isSuccess()) .isFalse(); + LoggingEvent log = + logs.assertContains( + "Failed to export " + + type + + "s. Server is UNAVAILABLE. " + + "Make sure your collector is running and reachable from this network."); + assertThat(log.getLevel()).isEqualTo(Level.ERROR); } finally { exporter.shutdown(); } - - assertThat(attempts).hasValue(2); } - @ParameterizedTest - @ValueSource(ints = {2, 3, 5, 6, 7, 9, 12, 13, 16}) + @Test @SuppressLogger(GrpcExporter.class) - void nonRetryableError(int code) { - addGrpcError(code, null); + void testExport_Unimplemented() { + addGrpcError(12, "UNIMPLEMENTED"); - TelemetryExporter exporter = retryingExporter(); + TelemetryExporter exporter = nonRetryingExporter(); try { assertThat( @@ -619,9 +714,82 @@ void nonRetryableError(int code) { .join(10, TimeUnit.SECONDS) .isSuccess()) .isFalse(); + String envVar; + switch (type) { + case "span": + envVar = "OTEL_TRACES_EXPORTER"; + break; + case "metric": + envVar = "OTEL_METRICS_EXPORTER"; + break; + case "log": + envVar = "OTEL_LOGS_EXPORTER"; + break; + default: + throw new AssertionError(); + } + LoggingEvent log = + logs.assertContains( + "Failed to export " + + type + + "s. Server responded with UNIMPLEMENTED. " + + "This usually means that your collector is not configured with an otlp " + + "receiver in the \"pipelines\" section of the configuration. " + + "If export is not desired and you are using OpenTelemetry autoconfiguration or the javaagent, " + + "disable export by setting " + + envVar + + "=none. 
" + + "Full error message: UNIMPLEMENTED"); + assertThat(log.getLevel()).isEqualTo(Level.ERROR); } finally { exporter.shutdown(); } + } + + @ParameterizedTest + @ValueSource(ints = {1, 4, 8, 10, 11, 14, 15}) + @SuppressLogger(GrpcExporter.class) + void retryableError(int code) { + addGrpcError(code, null); + + assertThat( + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS) + .isSuccess()) + .isTrue(); + + assertThat(attempts).hasValue(2); + } + + @Test + @SuppressLogger(GrpcExporter.class) + void retryableError_tooManyAttempts() { + addGrpcError(1, null); + addGrpcError(1, null); + + assertThat( + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS) + .isSuccess()) + .isFalse(); + + assertThat(attempts).hasValue(2); + } + + @ParameterizedTest + @ValueSource(ints = {2, 3, 5, 6, 7, 9, 12, 13, 16}) + @SuppressLogger(GrpcExporter.class) + void nonRetryableError(int code) { + addGrpcError(code, null); + + assertThat( + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS) + .isSuccess()) + .isFalse(); assertThat(attempts).hasValue(1); } @@ -647,16 +815,67 @@ void overrideHost() { .satisfies(req -> assertThat(req.authority()).isEqualTo("opentelemetry")); } + @Test + void executorService() { + ExecutorServiceSpy executorService = + new ExecutorServiceSpy(Executors.newSingleThreadExecutor()); + + TelemetryExporter exporter = + exporterBuilder() + .setEndpoint(server.httpUri().toString()) + .setExecutorService(executorService) + .build(); + + try { + CompletableResultCode result = + exporter.export(Collections.singletonList(generateFakeTelemetry())); + + assertThat(result.join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); + assertThat(executorService.getTaskCount()).isPositive(); + } finally { + exporter.shutdown(); + // If setting executor, the user is responsible for calling shutdown + assertThat(executorService.isShutdown()).isFalse(); + executorService.shutdown(); + } + } + @Test @SuppressWarnings("PreferJavaTimeOverload") void validConfig() { - assertThatCode(() -> exporterBuilder().setTimeout(0, TimeUnit.MILLISECONDS)) + // We must build exporters to test timeout settings, which intersect with underlying client + // implementations and may convert between Duration, int, and long, which may be susceptible to + // overflow exceptions. 
+ assertThatCode(() -> buildAndShutdown(exporterBuilder().setTimeout(0, TimeUnit.MILLISECONDS))) .doesNotThrowAnyException(); - assertThatCode(() -> exporterBuilder().setTimeout(Duration.ofMillis(0))) + assertThatCode(() -> buildAndShutdown(exporterBuilder().setTimeout(Duration.ofMillis(0)))) .doesNotThrowAnyException(); - assertThatCode(() -> exporterBuilder().setTimeout(10, TimeUnit.MILLISECONDS)) + assertThatCode( + () -> + buildAndShutdown( + exporterBuilder().setTimeout(Long.MAX_VALUE, TimeUnit.NANOSECONDS))) .doesNotThrowAnyException(); - assertThatCode(() -> exporterBuilder().setTimeout(Duration.ofMillis(10))) + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setTimeout(Duration.ofNanos(Long.MAX_VALUE)))) + .doesNotThrowAnyException(); + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setTimeout(Long.MAX_VALUE, TimeUnit.SECONDS))) + .doesNotThrowAnyException(); + assertThatCode(() -> buildAndShutdown(exporterBuilder().setTimeout(10, TimeUnit.MILLISECONDS))) + .doesNotThrowAnyException(); + assertThatCode(() -> buildAndShutdown(exporterBuilder().setTimeout(Duration.ofMillis(10)))) + .doesNotThrowAnyException(); + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setConnectTimeout(0, TimeUnit.MILLISECONDS))) + .doesNotThrowAnyException(); + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setConnectTimeout(Duration.ofMillis(0)))) + .doesNotThrowAnyException(); + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setConnectTimeout(10, TimeUnit.MILLISECONDS))) + .doesNotThrowAnyException(); + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setConnectTimeout(Duration.ofMillis(10)))) .doesNotThrowAnyException(); assertThatCode(() -> exporterBuilder().setEndpoint("http://localhost:4317")) @@ -669,6 +888,8 @@ void validConfig() { .doesNotThrowAnyException(); assertThatCode(() -> exporterBuilder().setCompression("gzip")).doesNotThrowAnyException(); + // SPI compressor available for this test but not packaged with OTLP exporter + assertThatCode(() -> exporterBuilder().setCompression("base64")).doesNotThrowAnyException(); assertThatCode(() -> exporterBuilder().setCompression("none")).doesNotThrowAnyException(); assertThatCode(() -> exporterBuilder().addHeader("foo", "bar").addHeader("baz", "qux")) @@ -679,6 +900,44 @@ void validConfig() { .doesNotThrowAnyException(); } + @Test + void customServiceClassLoader() { + ClassLoaderSpy classLoaderSpy = + new ClassLoaderSpy(AbstractHttpTelemetryExporterTest.class.getClassLoader()); + + TelemetryExporter exporter = + exporterBuilder() + .setServiceClassLoader(classLoaderSpy) + .setEndpoint(server.httpUri().toString()) + .build(); + + assertThat(classLoaderSpy.getResourcesNames) + .isEqualTo( + Collections.singletonList( + "META-INF/services/io.opentelemetry.exporter.internal.grpc.GrpcSenderProvider")); + + exporter.shutdown(); + } + + private static class ClassLoaderSpy extends ClassLoader { + private final List getResourcesNames = new ArrayList<>(); + + private ClassLoaderSpy(ClassLoader delegate) { + super(delegate); + } + + @Override + public Enumeration getResources(String name) throws IOException { + getResourcesNames.add(name); + return super.getResources(name); + } + } + + private void buildAndShutdown(TelemetryExporterBuilder builder) { + TelemetryExporter build = builder.build(); + build.shutdown().join(10, TimeUnit.MILLISECONDS); + } + @Test @SuppressWarnings({"PreferJavaTimeOverload", "NullAway"}) void invalidConfig() { @@ -691,6 +950,25 @@ void invalidConfig() { 
assertThatThrownBy(() -> exporterBuilder().setTimeout(null)) .isInstanceOf(NullPointerException.class) .hasMessage("timeout"); + assertThatThrownBy( + () -> + buildAndShutdown(exporterBuilder().setTimeout(Duration.ofSeconds(Long.MAX_VALUE)))) + .isInstanceOf(ArithmeticException.class); + + assertThatThrownBy(() -> exporterBuilder().setConnectTimeout(-1, TimeUnit.MILLISECONDS)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("timeout must be non-negative"); + assertThatThrownBy(() -> exporterBuilder().setConnectTimeout(1, null)) + .isInstanceOf(NullPointerException.class) + .hasMessage("unit"); + assertThatThrownBy(() -> exporterBuilder().setConnectTimeout(null)) + .isInstanceOf(NullPointerException.class) + .hasMessage("timeout"); + assertThatThrownBy( + () -> + buildAndShutdown( + exporterBuilder().setConnectTimeout(Duration.ofSeconds(Long.MAX_VALUE)))) + .isInstanceOf(ArithmeticException.class); assertThatThrownBy(() -> exporterBuilder().setEndpoint(null)) .isInstanceOf(NullPointerException.class) @@ -711,7 +989,7 @@ void invalidConfig() { assertThatThrownBy(() -> exporterBuilder().setCompression("foo")) .isInstanceOf(IllegalArgumentException.class) .hasMessage( - "Unsupported compression method. Supported compression methods include: gzip, none."); + "Unsupported compressionMethod. Compression method must be \"none\" or one of: [base64,gzip]"); } @Test @@ -787,9 +1065,12 @@ void stringRepresentation() throws IOException, CertificateEncodingException { + "timeoutNanos=" + TimeUnit.SECONDS.toNanos(10) + ", " - + "compressionEnabled=false, " + + "connectTimeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + + "compressorEncoding=null, " + "headers=Headers\\{User-Agent=OBFUSCATED\\}" - + ".*" // Maybe additional grpcChannel field + + ".*" // Maybe additional grpcChannel field, signal specific fields + "\\}"); } finally { telemetryExporter.shutdown(); @@ -798,6 +1079,7 @@ void stringRepresentation() throws IOException, CertificateEncodingException { telemetryExporter = exporterBuilder() .setTimeout(Duration.ofSeconds(5)) + .setConnectTimeout(Duration.ofSeconds(4)) .setEndpoint("http://example:4317") .setCompression("gzip") .addHeader("foo", "bar") @@ -824,10 +1106,13 @@ void stringRepresentation() throws IOException, CertificateEncodingException { + "timeoutNanos=" + TimeUnit.SECONDS.toNanos(5) + ", " - + "compressionEnabled=true, " + + "connectTimeoutNanos=" + + TimeUnit.SECONDS.toNanos(4) + + ", " + + "compressorEncoding=gzip, " + "headers=Headers\\{.*foo=OBFUSCATED.*\\}, " - + "retryPolicy=RetryPolicy\\{maxAttempts=2, initialBackoff=PT0\\.05S, maxBackoff=PT3S, backoffMultiplier=1\\.3\\}" - + ".*" // Maybe additional grpcChannel field + + "retryPolicy=RetryPolicy\\{maxAttempts=2, initialBackoff=PT0\\.05S, maxBackoff=PT3S, backoffMultiplier=1\\.3, retryExceptionPredicate=null\\}" + + ".*" // Maybe additional grpcChannel field, signal specific fields + "\\}"); } finally { telemetryExporter.shutdown(); @@ -864,19 +1149,8 @@ private List toProto(List telemetry) { .collect(Collectors.toList()); } - private TelemetryExporter retryingExporter() { - return exporterBuilder() - .setEndpoint(server.httpUri().toString()) - .setRetryPolicy( - RetryPolicy.builder() - .setMaxAttempts(2) - // We don't validate backoff time itself in these tests, just that retries - // occur. Keep the tests fast by using minimal backoff. 
- .setInitialBackoff(Duration.ofMillis(1)) - .setMaxBackoff(Duration.ofMillis(1)) - .setBackoffMultiplier(1) - .build()) - .build(); + private TelemetryExporter nonRetryingExporter() { + return exporterBuilder().setEndpoint(server.httpUri().toString()).setRetryPolicy(null).build(); } private static void addGrpcError(int code, @Nullable String message) { diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/AbstractHttpTelemetryExporterTest.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/AbstractHttpTelemetryExporterTest.java index be8e5e773fc..705593c65fb 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/AbstractHttpTelemetryExporterTest.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/AbstractHttpTelemetryExporterTest.java @@ -8,7 +8,6 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.assertj.core.api.Assumptions.assumeThat; import static org.junit.jupiter.api.Named.named; import static org.junit.jupiter.params.provider.Arguments.arguments; @@ -19,16 +18,20 @@ import com.linecorp.armeria.common.HttpStatus; import com.linecorp.armeria.common.MediaType; import com.linecorp.armeria.common.RequestHeaders; +import com.linecorp.armeria.common.TlsKeyPair; import com.linecorp.armeria.server.HttpService; import com.linecorp.armeria.server.ServerBuilder; import com.linecorp.armeria.server.ServiceRequestContext; -import com.linecorp.armeria.server.logging.LoggingService; import com.linecorp.armeria.testing.junit5.server.SelfSignedCertificateExtension; import com.linecorp.armeria.testing.junit5.server.ServerExtension; import io.github.netmikey.logunit.api.LogCapturer; +import io.opentelemetry.exporter.internal.FailedExportException; import io.opentelemetry.exporter.internal.TlsUtil; +import io.opentelemetry.exporter.internal.compression.GzipCompressor; import io.opentelemetry.exporter.internal.http.HttpExporter; +import io.opentelemetry.exporter.internal.http.HttpSender; import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.otlp.testing.internal.compressor.Base64Compressor; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest; import io.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse; @@ -37,22 +40,28 @@ import io.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest; import io.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.UncheckedIOException; import java.lang.reflect.Field; +import java.net.InetSocketAddress; +import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.security.cert.CertificateEncodingException; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; +import java.util.Base64; import java.util.Collections; +import java.util.Enumeration; import java.util.List; import 
java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; @@ -66,6 +75,9 @@ import okio.Buffer; import okio.GzipSource; import okio.Okio; +import okio.Source; +import org.assertj.core.api.Assertions; +import org.assertj.core.api.InstanceOfAssertFactories; import org.assertj.core.api.iterable.ThrowingExtractor; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -80,6 +92,7 @@ import org.junit.jupiter.params.provider.ArgumentsProvider; import org.junit.jupiter.params.provider.ArgumentsSource; import org.junit.jupiter.params.provider.ValueSource; +import org.mockserver.integration.ClientAndServer; import org.slf4j.event.Level; import org.slf4j.event.LoggingEvent; @@ -133,9 +146,10 @@ protected void configure(ServerBuilder sb) { sb.http(0); sb.https(0); - sb.tls(certificate.certificateFile(), certificate.privateKeyFile()); + sb.tls(TlsKeyPair.of(certificate.privateKey(), certificate.certificate())); sb.tlsCustomizer(ssl -> ssl.trustManager(clientCertificate.certificate())); - sb.decorator(LoggingService.newDecorator()); + // Uncomment for detailed request / response logs from server + // sb.decorator(LoggingService.newDecorator()); } }; @@ -163,8 +177,7 @@ public HttpResponse serve(ServiceRequestContext ctx, HttpRequest req) { aggReq -> { T request; try { - byte[] requestBody = - maybeGzipInflate(aggReq.headers(), aggReq.content().array()); + byte[] requestBody = maybeInflate(aggReq.headers(), aggReq.content().array()); request = parse.extractThrows(requestBody); } catch (IOException e) { throw new UncheckedIOException(e); @@ -181,15 +194,22 @@ public HttpResponse serve(ServiceRequestContext ctx, HttpRequest req) { return HttpResponse.of(responseFuture); } - private static byte[] maybeGzipInflate(RequestHeaders requestHeaders, byte[] content) + private static byte[] maybeInflate(RequestHeaders requestHeaders, byte[] content) throws IOException { - if (!requestHeaders.contains("content-encoding", "gzip")) { - return content; + if (requestHeaders.contains("content-encoding", "gzip")) { + Buffer buffer = new Buffer(); + GzipSource gzipSource = new GzipSource(Okio.source(new ByteArrayInputStream(content))); + gzipSource.read(buffer, Integer.MAX_VALUE); + return buffer.readByteArray(); } - Buffer buffer = new Buffer(); - GzipSource gzipSource = new GzipSource(Okio.source(new ByteArrayInputStream(content))); - gzipSource.read(buffer, Integer.MAX_VALUE); - return buffer.readByteArray(); + if (requestHeaders.contains("content-encoding", "base64")) { + Buffer buffer = new Buffer(); + Source base64Source = + Okio.source(Base64.getDecoder().wrap(new ByteArrayInputStream(content))); + base64Source.read(buffer, Integer.MAX_VALUE); + return buffer.readByteArray(); + } + return content; } } @@ -210,7 +230,18 @@ protected AbstractHttpTelemetryExporterTest( @BeforeAll void setUp() { - exporter = exporterBuilder().setEndpoint(server.httpUri() + path).build(); + // + exporter = + exporterBuilder() + .setEndpoint(server.httpUri() + path) + // We don't validate backoff time itself in these tests, just that retries + // occur. Keep the tests fast by using minimal backoff. + .setRetryPolicy( + RetryPolicy.getDefault().toBuilder() + .setMaxAttempts(2) + .setInitialBackoff(Duration.ofMillis(1)) + .build()) + .build(); // Sanity check that TLS files are in PEM format. 
assertThat(certificate.certificateFile()) @@ -275,9 +306,7 @@ void multipleItems() { void compressionWithNone() { TelemetryExporter exporter = exporterBuilder().setEndpoint(server.httpUri() + path).setCompression("none").build(); - assertThat(exporter.unwrap()) - .extracting("delegate.httpSender.compressionEnabled") - .isEqualTo(false); + assertThat(exporter.unwrap()).extracting("delegate.httpSender.compressor").isNull(); try { CompletableResultCode result = exporter.export(Collections.singletonList(generateFakeTelemetry())); @@ -295,8 +324,8 @@ void compressionWithGzip() { TelemetryExporter exporter = exporterBuilder().setEndpoint(server.httpUri() + path).setCompression("gzip").build(); assertThat(exporter.unwrap()) - .extracting("delegate.httpSender.compressionEnabled") - .isEqualTo(true); + .extracting("delegate.httpSender.compressor") + .isEqualTo(GzipCompressor.getInstance()); try { CompletableResultCode result = exporter.export(Collections.singletonList(generateFakeTelemetry())); @@ -310,61 +339,66 @@ void compressionWithGzip() { } @Test - void authorityWithAuth() { + void compressionWithSpiCompressor() { TelemetryExporter exporter = - exporterBuilder() - .setEndpoint("http://foo:bar@localhost:" + server.httpPort() + path) - .build(); + exporterBuilder().setEndpoint(server.httpUri() + path).setCompression("base64").build(); + assertThat(exporter.unwrap()) + .extracting("delegate.httpSender.compressor") + .isEqualTo(Base64Compressor.getInstance()); try { CompletableResultCode result = exporter.export(Collections.singletonList(generateFakeTelemetry())); assertThat(result.join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); + assertThat(httpRequests) + .singleElement() + .satisfies(req -> assertThat(req.headers().get("content-encoding")).isEqualTo("base64")); } finally { exporter.shutdown(); } } @Test - void withHeaders() { + void authorityWithAuth() { TelemetryExporter exporter = - exporterBuilder().setEndpoint(server.httpUri() + path).addHeader("key", "value").build(); + exporterBuilder() + .setEndpoint("http://foo:bar@localhost:" + server.httpPort() + path) + .build(); try { CompletableResultCode result = exporter.export(Collections.singletonList(generateFakeTelemetry())); assertThat(result.join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); - assertThat(httpRequests) - .singleElement() - .satisfies(req -> assertThat(req.headers().get("key")).isEqualTo("value")); } finally { exporter.shutdown(); } } @Test - void withAuthenticator() { - assumeThat(hasAuthenticatorSupport()).isTrue(); - + void withHeaders() { + AtomicInteger count = new AtomicInteger(); TelemetryExporter exporter = exporterBuilder() .setEndpoint(server.httpUri() + path) - .setAuthenticator(() -> Collections.singletonMap("key", "value")) + .addHeader("key1", "value1") + .setHeaders(() -> Collections.singletonMap("key2", "value" + count.incrementAndGet())) .build(); - - addHttpError(401); - try { - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isTrue(); - assertThat(httpRequests) - .element(0) - .satisfies(req -> assertThat(req.headers().get("key")).isNull()); + // Export twice to ensure header supplier gets invoked twice + CompletableResultCode result = + exporter.export(Collections.singletonList(generateFakeTelemetry())); + assertThat(result.join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); + result = exporter.export(Collections.singletonList(generateFakeTelemetry())); + assertThat(result.join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); 
+ assertThat(httpRequests) - .element(1) - .satisfies(req -> assertThat(req.headers().get("key")).isEqualTo("value")); + .satisfiesExactly( + req -> { + assertThat(req.headers().get("key1")).isEqualTo("value1"); + assertThat(req.headers().get("key2")).isEqualTo("value" + (count.get() - 1)); + }, + req -> { + assertThat(req.headers().get("key1")).isEqualTo("value1"); + assertThat(req.headers().get("key2")).isEqualTo("value" + count.get()); + }); } finally { exporter.shutdown(); } @@ -464,6 +498,43 @@ public Stream provideArguments(ExtensionContext context) th } } + @Test + @SuppressLogger(HttpExporter.class) + void connectTimeout() { + TelemetryExporter exporter = + exporterBuilder() + // Connecting to a non-routable IP address to trigger connection error + .setEndpoint("http://10.255.255.1") + .setConnectTimeout(Duration.ofMillis(1)) + .setRetryPolicy(null) + .build(); + try { + long startTimeMillis = System.currentTimeMillis(); + CompletableResultCode result = + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS); + + assertThat(result.isSuccess()).isFalse(); + + assertThat(result.getFailureThrowable()) + .asInstanceOf( + InstanceOfAssertFactories.throwable(FailedExportException.HttpExportException.class)) + .returns(false, Assertions.from(FailedExportException::failedWithResponse)) + .satisfies( + ex -> { + assertThat(ex.getResponse()).isNull(); + assertThat(ex.getCause()).isNotNull(); + }); + + // Assert that the export request fails well before the default connect timeout of 10s + assertThat(System.currentTimeMillis() - startTimeMillis) + .isLessThan(TimeUnit.SECONDS.toMillis(1)); + } finally { + exporter.shutdown(); + } + } + @Test void deadlineSetPerExport() throws InterruptedException { TelemetryExporter exporter = @@ -510,13 +581,35 @@ void doubleShutdown() { @Test @SuppressLogger(HttpExporter.class) void error() { - addHttpError(500); - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); + int statusCode = 500; + addHttpError(statusCode); + CompletableResultCode result = + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS); + + assertThat(result.isSuccess()).isFalse(); + + assertThat(result.getFailureThrowable()) + .asInstanceOf( + InstanceOfAssertFactories.throwable(FailedExportException.HttpExportException.class)) + .returns(true, Assertions.from(FailedExportException::failedWithResponse)) + .satisfies( + ex -> { + assertThat(ex.getResponse()) + .isNotNull() + .satisfies( + response -> { + assertThat(response) + .extracting(HttpSender.Response::statusCode) + .isEqualTo(statusCode); + + assertThatCode(response::responseBody).doesNotThrowAnyException(); + }); + + assertThat(ex.getCause()).isNull(); + }); + LoggingEvent log = logs.assertContains( "Failed to export " @@ -530,18 +623,12 @@ void error() { void retryableError(int code) { addHttpError(code); - TelemetryExporter exporter = retryingExporter(); - - try { - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isTrue(); - } finally { - exporter.shutdown(); - } + assertThat( + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS) + .isSuccess()) + .isTrue(); assertThat(attempts).hasValue(2); } @@ -552,18 +639,12 @@ void retryableError_tooManyAttempts() { addHttpError(502); addHttpError(502); - TelemetryExporter 
exporter = retryingExporter(); - - try { - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); - } finally { - exporter.shutdown(); - } + assertThat( + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS) + .isSuccess()) + .isFalse(); assertThat(attempts).hasValue(2); } @@ -574,32 +655,110 @@ void retryableError_tooManyAttempts() { void nonRetryableError(int code) { addHttpError(code); - TelemetryExporter exporter = retryingExporter(); + assertThat( + exporter + .export(Collections.singletonList(generateFakeTelemetry())) + .join(10, TimeUnit.SECONDS) + .isSuccess()) + .isFalse(); + + assertThat(attempts).hasValue(1); + } + + @Test + void proxy() { + // configure mockserver to proxy to the local OTLP server + InetSocketAddress serverSocketAddress = server.httpSocketAddress(); + try (ClientAndServer clientAndServer = + ClientAndServer.startClientAndServer( + serverSocketAddress.getHostName(), serverSocketAddress.getPort())) { + TelemetryExporter exporter = + exporterBuilder() + // Configure exporter with server endpoint, and proxy options to route through + // mockserver proxy + .setEndpoint(server.httpUri() + path) + .setProxyOptions( + ProxyOptions.create( + InetSocketAddress.createUnresolved("localhost", clientAndServer.getPort()))) + .build(); + + try { + List telemetry = Collections.singletonList(generateFakeTelemetry()); + + assertThat(exporter.export(telemetry).join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); + // assert that mock server received request + assertThat(clientAndServer.retrieveRecordedRequests(new org.mockserver.model.HttpRequest())) + .hasSize(1); + // assert that server received telemetry from proxy, and is as expected + List expectedResourceTelemetry = toProto(telemetry); + assertThat(exportedResourceTelemetry).containsExactlyElementsOf(expectedResourceTelemetry); + } finally { + exporter.shutdown(); + } + } + } + + @Test + void executorService() { + ExecutorServiceSpy executorService = + new ExecutorServiceSpy(Executors.newSingleThreadExecutor()); + + TelemetryExporter exporter = + exporterBuilder() + .setEndpoint(server.httpUri() + path) + .setExecutorService(executorService) + .build(); try { - assertThat( - exporter - .export(Collections.singletonList(generateFakeTelemetry())) - .join(10, TimeUnit.SECONDS) - .isSuccess()) - .isFalse(); + CompletableResultCode result = + exporter.export(Collections.singletonList(generateFakeTelemetry())); + + assertThat(result.join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); + assertThat(executorService.getTaskCount()).isPositive(); } finally { exporter.shutdown(); + // If setting executor, the user is responsible for calling shutdown + assertThat(executorService.isShutdown()).isFalse(); + executorService.shutdown(); } - - assertThat(attempts).hasValue(1); } @Test @SuppressWarnings("PreferJavaTimeOverload") void validConfig() { - assertThatCode(() -> exporterBuilder().setTimeout(0, TimeUnit.MILLISECONDS)) + // We must build exporters to test timeout settings, which intersect with underlying client + // implementations and may convert between Duration, int, and long, which may be susceptible to + // overflow exceptions. 
+ assertThatCode(() -> buildAndShutdown(exporterBuilder().setTimeout(0, TimeUnit.MILLISECONDS))) + .doesNotThrowAnyException(); + assertThatCode(() -> buildAndShutdown(exporterBuilder().setTimeout(Duration.ofMillis(0)))) + .doesNotThrowAnyException(); + assertThatCode( + () -> + buildAndShutdown( + exporterBuilder().setTimeout(Long.MAX_VALUE, TimeUnit.NANOSECONDS))) .doesNotThrowAnyException(); - assertThatCode(() -> exporterBuilder().setTimeout(Duration.ofMillis(0))) + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setTimeout(Duration.ofNanos(Long.MAX_VALUE)))) + .doesNotThrowAnyException(); + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setTimeout(Long.MAX_VALUE, TimeUnit.SECONDS))) + .doesNotThrowAnyException(); + assertThatCode(() -> buildAndShutdown(exporterBuilder().setTimeout(10, TimeUnit.MILLISECONDS))) + .doesNotThrowAnyException(); + assertThatCode(() -> buildAndShutdown(exporterBuilder().setTimeout(Duration.ofMillis(10)))) .doesNotThrowAnyException(); - assertThatCode(() -> exporterBuilder().setTimeout(10, TimeUnit.MILLISECONDS)) + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setConnectTimeout(0, TimeUnit.MILLISECONDS))) .doesNotThrowAnyException(); - assertThatCode(() -> exporterBuilder().setTimeout(Duration.ofMillis(10))) + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setConnectTimeout(Duration.ofMillis(0)))) + .doesNotThrowAnyException(); + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setConnectTimeout(10, TimeUnit.MILLISECONDS))) + .doesNotThrowAnyException(); + assertThatCode( + () -> buildAndShutdown(exporterBuilder().setConnectTimeout(Duration.ofMillis(10)))) .doesNotThrowAnyException(); assertThatCode(() -> exporterBuilder().setEndpoint("http://localhost:4318")) @@ -612,6 +771,8 @@ void validConfig() { .doesNotThrowAnyException(); assertThatCode(() -> exporterBuilder().setCompression("gzip")).doesNotThrowAnyException(); + // SPI compressor available for this test but not packaged with OTLP exporter + assertThatCode(() -> exporterBuilder().setCompression("base64")).doesNotThrowAnyException(); assertThatCode(() -> exporterBuilder().setCompression("none")).doesNotThrowAnyException(); assertThatCode(() -> exporterBuilder().addHeader("foo", "bar").addHeader("baz", "qux")) @@ -622,6 +783,11 @@ void validConfig() { .doesNotThrowAnyException(); } + private void buildAndShutdown(TelemetryExporterBuilder builder) { + TelemetryExporter build = builder.build(); + build.shutdown().join(10, TimeUnit.MILLISECONDS); + } + @Test @SuppressWarnings({"PreferJavaTimeOverload", "NullAway"}) void invalidConfig() { @@ -634,6 +800,25 @@ void invalidConfig() { assertThatThrownBy(() -> exporterBuilder().setTimeout(null)) .isInstanceOf(NullPointerException.class) .hasMessage("timeout"); + assertThatThrownBy( + () -> + buildAndShutdown(exporterBuilder().setTimeout(Duration.ofSeconds(Long.MAX_VALUE)))) + .isInstanceOf(ArithmeticException.class); + + assertThatThrownBy(() -> exporterBuilder().setConnectTimeout(-1, TimeUnit.MILLISECONDS)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("timeout must be non-negative"); + assertThatThrownBy(() -> exporterBuilder().setConnectTimeout(1, null)) + .isInstanceOf(NullPointerException.class) + .hasMessage("unit"); + assertThatThrownBy(() -> exporterBuilder().setConnectTimeout(null)) + .isInstanceOf(NullPointerException.class) + .hasMessage("timeout"); + assertThatThrownBy( + () -> + buildAndShutdown( + exporterBuilder().setConnectTimeout(Duration.ofSeconds(Long.MAX_VALUE)))) + 
.isInstanceOf(ArithmeticException.class); assertThatThrownBy(() -> exporterBuilder().setEndpoint(null)) .isInstanceOf(NullPointerException.class) @@ -654,7 +839,7 @@ void invalidConfig() { assertThatThrownBy(() -> exporterBuilder().setCompression("foo")) .isInstanceOf(IllegalArgumentException.class) .hasMessage( - "Unsupported compression method. Supported compression methods include: gzip, none."); + "Unsupported compressionMethod. Compression method must be \"none\" or one of: [base64,gzip]"); } @Test @@ -666,6 +851,7 @@ void toBuilderEquality() TelemetryExporter exporter = exporterBuilder() .setTimeout(Duration.ofSeconds(5)) + .setConnectTimeout(Duration.ofSeconds(4)) .setEndpoint("http://localhost:4318") .setCompression("gzip") .addHeader("foo", "bar") @@ -715,6 +901,39 @@ void toBuilderEquality() } } + @Test + void customServiceClassLoader() { + ClassLoaderSpy classLoaderSpy = + new ClassLoaderSpy(AbstractHttpTelemetryExporterTest.class.getClassLoader()); + + TelemetryExporter exporter = + exporterBuilder() + .setServiceClassLoader(classLoaderSpy) + .setEndpoint(server.httpUri() + path) + .build(); + + assertThat(classLoaderSpy.getResourcesNames) + .isEqualTo( + Collections.singletonList( + "META-INF/services/io.opentelemetry.exporter.internal.http.HttpSenderProvider")); + + exporter.shutdown(); + } + + private static class ClassLoaderSpy extends ClassLoader { + private final List getResourcesNames = new ArrayList<>(); + + private ClassLoaderSpy(ClassLoader delegate) { + super(delegate); + } + + @Override + public Enumeration getResources(String name) throws IOException { + getResourcesNames.add(name); + return super.getResources(name); + } + } + @Test void stringRepresentation() throws IOException, CertificateEncodingException { TelemetryExporter telemetryExporter = exporterBuilder().build(); @@ -728,9 +947,14 @@ void stringRepresentation() throws IOException, CertificateEncodingException { + "timeoutNanos=" + TimeUnit.SECONDS.toNanos(10) + ", " - + "compressionEnabled=false, " + + "proxyOptions=null, " + + "compressorEncoding=null, " + + "connectTimeoutNanos=" + + TimeUnit.SECONDS.toNanos(10) + + ", " + "exportAsJson=false, " + "headers=Headers\\{User-Agent=OBFUSCATED\\}" + + ".*" // Maybe additional signal specific fields + "\\}"); } finally { telemetryExporter.shutdown(); @@ -739,6 +963,7 @@ void stringRepresentation() throws IOException, CertificateEncodingException { telemetryExporter = exporterBuilder() .setTimeout(Duration.ofSeconds(5)) + .setConnectTimeout(Duration.ofSeconds(4)) .setEndpoint("http://example:4318/v1/logs") .setCompression("gzip") .addHeader("foo", "bar") @@ -764,10 +989,15 @@ void stringRepresentation() throws IOException, CertificateEncodingException { + "timeoutNanos=" + TimeUnit.SECONDS.toNanos(5) + ", " - + "compressionEnabled=true, " + + "proxyOptions=null, " + + "compressorEncoding=gzip, " + + "connectTimeoutNanos=" + + TimeUnit.SECONDS.toNanos(4) + + ", " + "exportAsJson=false, " + "headers=Headers\\{.*foo=OBFUSCATED.*\\}, " - + "retryPolicy=RetryPolicy\\{maxAttempts=2, initialBackoff=PT0\\.05S, maxBackoff=PT3S, backoffMultiplier=1\\.3\\}" + + "retryPolicy=RetryPolicy\\{maxAttempts=2, initialBackoff=PT0\\.05S, maxBackoff=PT3S, backoffMultiplier=1\\.3, retryExceptionPredicate=null\\}" + + ".*" // Maybe additional signal specific fields + "\\}"); } finally { telemetryExporter.shutdown(); @@ -782,11 +1012,6 @@ void stringRepresentation() throws IOException, CertificateEncodingException { protected abstract Marshaler[] toMarshalers(List telemetry); - // 
TODO: remove once JdkHttpSender supports authenticator - protected boolean hasAuthenticatorSupport() { - return true; - } - private List toProto(List telemetry) { return Arrays.stream(toMarshalers(telemetry)) .map( @@ -809,21 +1034,6 @@ private List toProto(List telemetry) { .collect(Collectors.toList()); } - private TelemetryExporter retryingExporter() { - return exporterBuilder() - .setEndpoint(server.httpUri() + path) - .setRetryPolicy( - RetryPolicy.builder() - .setMaxAttempts(2) - // We don't validate backoff time itself in these tests, just that retries - // occur. Keep the tests fast by using minimal backoff. - .setInitialBackoff(Duration.ofMillis(1)) - .setMaxBackoff(Duration.ofMillis(1)) - .setBackoffMultiplier(1) - .build()) - .build(); - } - private static void addHttpError(int code) { httpErrors.add(HttpResponse.of(code)); } diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/ExecutorServiceSpy.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/ExecutorServiceSpy.java new file mode 100644 index 00000000000..f16cf99edb0 --- /dev/null +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/ExecutorServiceSpy.java @@ -0,0 +1,113 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.testing.internal; + +import java.util.Collection; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import org.jetbrains.annotations.NotNull; + +class ExecutorServiceSpy implements ExecutorService { + + private final ExecutorService delegate; + private int taskCount; + + ExecutorServiceSpy(ExecutorService delegate) { + this.delegate = delegate; + } + + @Override + public void shutdown() { + delegate.shutdown(); + } + + @NotNull + @Override + public List shutdownNow() { + return delegate.shutdownNow(); + } + + @Override + public boolean isShutdown() { + return delegate.isShutdown(); + } + + @Override + public boolean isTerminated() { + return delegate.isTerminated(); + } + + @Override + public boolean awaitTermination(long timeout, @NotNull TimeUnit unit) + throws InterruptedException { + return delegate.awaitTermination(timeout, unit); + } + + @NotNull + @Override + public Future submit(@NotNull Callable task) { + taskCount++; + return delegate.submit(task); + } + + @NotNull + @Override + public Future submit(@NotNull Runnable task, T result) { + taskCount++; + return delegate.submit(task, result); + } + + @NotNull + @Override + public Future submit(@NotNull Runnable task) { + taskCount++; + return delegate.submit(task); + } + + @NotNull + @Override + public List> invokeAll(@NotNull Collection> tasks) + throws InterruptedException { + return delegate.invokeAll(tasks); + } + + @NotNull + @Override + public List> invokeAll( + @NotNull Collection> tasks, long timeout, @NotNull TimeUnit unit) + throws InterruptedException { + return delegate.invokeAll(tasks, timeout, unit); + } + + @NotNull + @Override + public T invokeAny(@NotNull Collection> tasks) + throws InterruptedException, ExecutionException { + return delegate.invokeAny(tasks); + } + + @Override + public T invokeAny( + @NotNull Collection> tasks, long timeout, @NotNull TimeUnit unit) + throws 
InterruptedException, ExecutionException, TimeoutException { + return delegate.invokeAny(tasks, timeout, unit); + } + + @Override + public void execute(@NotNull Runnable command) { + taskCount++; + delegate.execute(command); + } + + public int getTaskCount() { + return taskCount; + } +} diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/FakeTelemetryUtil.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/FakeTelemetryUtil.java index 2375e35aef1..2bdf65ef23e 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/FakeTelemetryUtil.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/FakeTelemetryUtil.java @@ -33,6 +33,11 @@ public class FakeTelemetryUtil { private static final String TRACE_ID = "00000000000000000000000000abc123"; private static final String SPAN_ID = "0000000000def456"; + private static final InstrumentationScopeInfo SCOPE_INFO = + InstrumentationScopeInfo.builder("testLib") + .setVersion("1.0") + .setSchemaUrl("http://url") + .build(); /** Generate a fake {@link MetricData}. */ public static MetricData generateFakeMetricData() { @@ -40,7 +45,7 @@ public static MetricData generateFakeMetricData() { long endNs = startNs + TimeUnit.MILLISECONDS.toNanos(900); return ImmutableMetricData.createLongSum( Resource.empty(), - InstrumentationScopeInfo.empty(), + SCOPE_INFO, "name", "description", "1", @@ -69,11 +74,7 @@ public static SpanData generateFakeSpanData() { .setLinks(Collections.emptyList()) .setTotalRecordedLinks(0) .setTotalRecordedEvents(0) - .setInstrumentationScopeInfo( - InstrumentationScopeInfo.builder("testLib") - .setVersion("1.0") - .setSchemaUrl("http://url") - .build()) + .setInstrumentationScopeInfo(SCOPE_INFO) .build(); } @@ -81,11 +82,7 @@ public static SpanData generateFakeSpanData() { public static LogRecordData generateFakeLogRecordData() { return TestLogRecordData.builder() .setResource(Resource.getDefault()) - .setInstrumentationScopeInfo( - InstrumentationScopeInfo.builder("testLib") - .setVersion("1.0") - .setSchemaUrl("http://url") - .build()) + .setInstrumentationScopeInfo(SCOPE_INFO) .setBody("log body") .setAttributes(Attributes.builder().put("key", "value").build()) .setSeverity(Severity.INFO) diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcLogRecordExporterBuilderWrapper.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcLogRecordExporterBuilderWrapper.java index 993b13312f3..e0a2da5c767 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcLogRecordExporterBuilderWrapper.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcLogRecordExporterBuilderWrapper.java @@ -6,12 +6,16 @@ package io.opentelemetry.exporter.otlp.testing.internal; import io.grpc.ManagedChannel; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import io.opentelemetry.sdk.logs.data.LogRecordData; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import 
java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -40,6 +44,18 @@ public TelemetryExporterBuilder setTimeout(Duration timeout) { return this; } + @Override + public TelemetryExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + builder.setConnectTimeout(timeout, unit); + return this; + } + + @Override + public TelemetryExporterBuilder setConnectTimeout(Duration timeout) { + builder.setConnectTimeout(timeout); + return this; + } + @Override public TelemetryExporterBuilder setCompression(String compression) { builder.setCompression(compression); @@ -53,7 +69,9 @@ public TelemetryExporterBuilder addHeader(String key, String valu } @Override - public TelemetryExporterBuilder setAuthenticator(Authenticator authenticator) { + public TelemetryExporterBuilder setHeaders( + Supplier> headerSupplier) { + builder.setHeaders(headerSupplier); return this; } @@ -78,11 +96,16 @@ public TelemetryExporterBuilder setSslContext( } @Override - public TelemetryExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { + public TelemetryExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { builder.setRetryPolicy(retryPolicy); return this; } + @Override + public TelemetryExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + throw new UnsupportedOperationException("ProxyOptions are not supported for gRPC"); + } + @Override @SuppressWarnings("deprecation") // testing deprecated functionality public TelemetryExporterBuilder setChannel(Object channel) { @@ -90,6 +113,20 @@ public TelemetryExporterBuilder setChannel(Object channel) { return this; } + @Override + public TelemetryExporterBuilder setServiceClassLoader( + ClassLoader serviceClassLoader) { + builder.setServiceClassLoader(serviceClassLoader); + return this; + } + + @Override + public TelemetryExporterBuilder setExecutorService( + ExecutorService executorService) { + builder.setExecutorService(executorService); + return this; + } + @Override public TelemetryExporter build() { return TelemetryExporter.wrap(builder.build()); diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcMetricExporterBuilderWrapper.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcMetricExporterBuilderWrapper.java index 0cfa4401ab1..09bd0a339be 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcMetricExporterBuilderWrapper.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcMetricExporterBuilderWrapper.java @@ -6,12 +6,16 @@ package io.opentelemetry.exporter.otlp.testing.internal; import io.grpc.ManagedChannel; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import io.opentelemetry.sdk.metrics.data.MetricData; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -40,6 +44,18 @@ public TelemetryExporterBuilder setTimeout(Duration timeout) { return this; } + @Override + public TelemetryExporterBuilder 
setConnectTimeout(long timeout, TimeUnit unit) { + builder.setConnectTimeout(timeout, unit); + return this; + } + + @Override + public TelemetryExporterBuilder setConnectTimeout(Duration timeout) { + builder.setConnectTimeout(timeout); + return this; + } + @Override public TelemetryExporterBuilder setCompression(String compression) { builder.setCompression(compression); @@ -53,7 +69,9 @@ public TelemetryExporterBuilder addHeader(String key, String value) } @Override - public TelemetryExporterBuilder setAuthenticator(Authenticator authenticator) { + public TelemetryExporterBuilder setHeaders( + Supplier> headerSupplier) { + builder.setHeaders(headerSupplier); return this; } @@ -78,11 +96,16 @@ public TelemetryExporterBuilder setSslContext( } @Override - public TelemetryExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { + public TelemetryExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { builder.setRetryPolicy(retryPolicy); return this; } + @Override + public TelemetryExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + throw new UnsupportedOperationException("ProxyOptions are not supported for gRPC"); + } + @Override @SuppressWarnings("deprecation") // testing deprecated functionality public TelemetryExporterBuilder setChannel(Object channel) { @@ -90,6 +113,19 @@ public TelemetryExporterBuilder setChannel(Object channel) { return this; } + @Override + public TelemetryExporterBuilder setServiceClassLoader( + ClassLoader serviceClassLoader) { + builder.setServiceClassLoader(serviceClassLoader); + return this; + } + + @Override + public TelemetryExporterBuilder setExecutorService(ExecutorService executorService) { + builder.setExecutorService(executorService); + return this; + } + @Override public TelemetryExporter build() { return TelemetryExporter.wrap(builder.build()); diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcSpanExporterBuilderWrapper.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcSpanExporterBuilderWrapper.java index bc4a10aa8f1..0ddb5e46d85 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcSpanExporterBuilderWrapper.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/GrpcSpanExporterBuilderWrapper.java @@ -6,12 +6,16 @@ package io.opentelemetry.exporter.otlp.testing.internal; import io.grpc.ManagedChannel; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import io.opentelemetry.sdk.trace.data.SpanData; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -41,6 +45,18 @@ public TelemetryExporterBuilder setTimeout(Duration timeout) { return this; } + @Override + public TelemetryExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + builder.setConnectTimeout(timeout, unit); + return this; + } + + @Override + public TelemetryExporterBuilder setConnectTimeout(Duration timeout) { + builder.setConnectTimeout(timeout); + return this; + } + @Override public TelemetryExporterBuilder 
setCompression(String compression) { builder.setCompression(compression); @@ -54,7 +70,9 @@ public TelemetryExporterBuilder addHeader(String key, String value) { } @Override - public TelemetryExporterBuilder setAuthenticator(Authenticator authenticator) { + public TelemetryExporterBuilder setHeaders( + Supplier> headerSupplier) { + builder.setHeaders(headerSupplier); return this; } @@ -79,11 +97,16 @@ public TelemetryExporterBuilder setSslContext( } @Override - public TelemetryExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { + public TelemetryExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { builder.setRetryPolicy(retryPolicy); return this; } + @Override + public TelemetryExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + throw new UnsupportedOperationException("ProxyOptions are not supported for gRPC"); + } + @Override @SuppressWarnings("deprecation") // testing deprecated functionality public TelemetryExporterBuilder setChannel(Object channel) { @@ -91,6 +114,18 @@ public TelemetryExporterBuilder setChannel(Object channel) { return this; } + @Override + public TelemetryExporterBuilder setServiceClassLoader(ClassLoader serviceClassLoader) { + builder.setServiceClassLoader(serviceClassLoader); + return this; + } + + @Override + public TelemetryExporterBuilder setExecutorService(ExecutorService executorService) { + builder.setExecutorService(executorService); + return this; + } + @Override public TelemetryExporter build() { return TelemetryExporter.wrap(builder.build()); diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpLogRecordExporterBuilderWrapper.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpLogRecordExporterBuilderWrapper.java index 7e007fa6e7b..0060bde1d16 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpLogRecordExporterBuilderWrapper.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpLogRecordExporterBuilderWrapper.java @@ -5,12 +5,16 @@ package io.opentelemetry.exporter.otlp.testing.internal; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporterBuilder; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import io.opentelemetry.sdk.logs.data.LogRecordData; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -41,6 +45,18 @@ public TelemetryExporterBuilder setTimeout(Duration timeout) { return this; } + @Override + public TelemetryExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + builder.setConnectTimeout(timeout, unit); + return this; + } + + @Override + public TelemetryExporterBuilder setConnectTimeout(Duration timeout) { + builder.setConnectTimeout(timeout); + return this; + } + @Override public TelemetryExporterBuilder setCompression(String compression) { builder.setCompression(compression); @@ -54,8 +70,9 @@ public TelemetryExporterBuilder addHeader(String key, String valu } @Override - public TelemetryExporterBuilder setAuthenticator(Authenticator authenticator) { - Authenticator.setAuthenticatorOnDelegate(builder, 
authenticator); + public TelemetryExporterBuilder setHeaders( + Supplier> headerSupplier) { + builder.setHeaders(headerSupplier); return this; } @@ -80,16 +97,36 @@ public TelemetryExporterBuilder setClientTls( } @Override - public TelemetryExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { + public TelemetryExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { builder.setRetryPolicy(retryPolicy); return this; } + @Override + public TelemetryExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + builder.setProxyOptions(proxyOptions); + return this; + } + @Override public TelemetryExporterBuilder setChannel(Object channel) { throw new UnsupportedOperationException("Not implemented"); } + @Override + public TelemetryExporterBuilder setServiceClassLoader( + ClassLoader serviceClassLoader) { + builder.setServiceClassLoader(serviceClassLoader); + return this; + } + + @Override + public TelemetryExporterBuilder setExecutorService( + ExecutorService executorService) { + builder.setExecutorService(executorService); + return this; + } + @Override public TelemetryExporter build() { return TelemetryExporter.wrap(builder.build()); diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpMetricExporterBuilderWrapper.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpMetricExporterBuilderWrapper.java index e3d9d1cc20e..f9a9f87c7ae 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpMetricExporterBuilderWrapper.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpMetricExporterBuilderWrapper.java @@ -5,12 +5,16 @@ package io.opentelemetry.exporter.otlp.testing.internal; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporterBuilder; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import io.opentelemetry.sdk.metrics.data.MetricData; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -40,6 +44,18 @@ public TelemetryExporterBuilder setTimeout(Duration timeout) { return this; } + @Override + public TelemetryExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + builder.setConnectTimeout(timeout, unit); + return this; + } + + @Override + public TelemetryExporterBuilder setConnectTimeout(Duration timeout) { + builder.setConnectTimeout(timeout); + return this; + } + @Override public TelemetryExporterBuilder setCompression(String compression) { builder.setCompression(compression); @@ -53,8 +69,9 @@ public TelemetryExporterBuilder addHeader(String key, String value) } @Override - public TelemetryExporterBuilder setAuthenticator(Authenticator authenticator) { - Authenticator.setAuthenticatorOnDelegate(builder, authenticator); + public TelemetryExporterBuilder setHeaders( + Supplier> headerSupplier) { + builder.setHeaders(headerSupplier); return this; } @@ -79,16 +96,35 @@ public TelemetryExporterBuilder setClientTls( } @Override - public TelemetryExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { + public TelemetryExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { 
builder.setRetryPolicy(retryPolicy); return this; } + @Override + public TelemetryExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + builder.setProxyOptions(proxyOptions); + return this; + } + @Override public TelemetryExporterBuilder setChannel(Object channel) { throw new UnsupportedOperationException("Not implemented"); } + @Override + public TelemetryExporterBuilder setServiceClassLoader( + ClassLoader serviceClassLoader) { + builder.setServiceClassLoader(serviceClassLoader); + return this; + } + + @Override + public TelemetryExporterBuilder setExecutorService(ExecutorService executorService) { + builder.setExecutorService(executorService); + return this; + } + @Override public TelemetryExporter build() { return TelemetryExporter.wrap(builder.build()); diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpSpanExporterBuilderWrapper.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpSpanExporterBuilderWrapper.java index 63660c4314b..7bdca0f20e8 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpSpanExporterBuilderWrapper.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/HttpSpanExporterBuilderWrapper.java @@ -5,12 +5,16 @@ package io.opentelemetry.exporter.otlp.testing.internal; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporterBuilder; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import io.opentelemetry.sdk.trace.data.SpanData; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -40,6 +44,18 @@ public TelemetryExporterBuilder setTimeout(Duration timeout) { return this; } + @Override + public TelemetryExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + builder.setConnectTimeout(timeout, unit); + return this; + } + + @Override + public TelemetryExporterBuilder setConnectTimeout(Duration timeout) { + builder.setConnectTimeout(timeout); + return this; + } + @Override public TelemetryExporterBuilder setCompression(String compression) { builder.setCompression(compression); @@ -53,8 +69,9 @@ public TelemetryExporterBuilder addHeader(String key, String value) { } @Override - public TelemetryExporterBuilder setAuthenticator(Authenticator authenticator) { - Authenticator.setAuthenticatorOnDelegate(builder, authenticator); + public TelemetryExporterBuilder setHeaders( + Supplier> headerSupplier) { + builder.setHeaders(headerSupplier); return this; } @@ -79,16 +96,34 @@ public TelemetryExporterBuilder setClientTls( } @Override - public TelemetryExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { + public TelemetryExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { builder.setRetryPolicy(retryPolicy); return this; } + @Override + public TelemetryExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + builder.setProxy(proxyOptions); + return this; + } + @Override public TelemetryExporterBuilder setChannel(Object channel) { throw new UnsupportedOperationException("Not implemented"); } + @Override + public TelemetryExporterBuilder 
setServiceClassLoader(ClassLoader serviceClassLoader) { + builder.setServiceClassLoader(serviceClassLoader); + return this; + } + + @Override + public TelemetryExporterBuilder setExecutorService(ExecutorService executorService) { + builder.setExecutorService(executorService); + return this; + } + @Override public TelemetryExporter build() { return TelemetryExporter.wrap(builder.build()); diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/ManagedChannelTelemetryExporterBuilder.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/ManagedChannelTelemetryExporterBuilder.java index 79781eb5809..8b080cfbfda 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/ManagedChannelTelemetryExporterBuilder.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/ManagedChannelTelemetryExporterBuilder.java @@ -13,15 +13,18 @@ import io.grpc.netty.NettyChannelBuilder; import io.netty.handler.ssl.SslContext; import io.opentelemetry.exporter.internal.TlsConfigHelper; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.internal.grpc.ManagedChannelUtil; import io.opentelemetry.exporter.otlp.internal.OtlpUserAgent; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.net.URI; import java.time.Duration; import java.util.Collection; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLException; @@ -76,6 +79,18 @@ public TelemetryExporterBuilder setTimeout(Duration timeout) { return this; } + @Override + public TelemetryExporterBuilder setConnectTimeout(long timeout, TimeUnit unit) { + delegate.setConnectTimeout(timeout, unit); + return this; + } + + @Override + public TelemetryExporterBuilder setConnectTimeout(Duration timeout) { + delegate.setConnectTimeout(timeout); + return this; + } + @Override public TelemetryExporterBuilder setCompression(String compression) { delegate.setCompression(compression); @@ -89,8 +104,8 @@ public TelemetryExporterBuilder addHeader(String key, String value) { } @Override - public TelemetryExporterBuilder setAuthenticator(Authenticator authenticator) { - delegate.setAuthenticator(authenticator); + public TelemetryExporterBuilder setHeaders(Supplier> headerSupplier) { + delegate.setHeaders(headerSupplier); return this; } @@ -113,8 +128,11 @@ public TelemetryExporterBuilder setClientTls(byte[] privateKeyPem, byte[] cer } @Override - public TelemetryExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { + public TelemetryExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy) { delegate.setRetryPolicy(retryPolicy); + if (retryPolicy == null) { + return this; + } String grpcServiceName; if (delegate instanceof GrpcLogRecordExporterBuilderWrapper) { grpcServiceName = "opentelemetry.proto.collector.logs.v1.LogsService"; @@ -131,23 +149,46 @@ public TelemetryExporterBuilder setRetryPolicy(RetryPolicy retryPolicy) { return this; } + @Override + public TelemetryExporterBuilder setProxyOptions(ProxyOptions proxyOptions) { + delegate.setProxyOptions(proxyOptions); + return this; + } + @Override public TelemetryExporterBuilder 
setChannel(Object channel) { throw new UnsupportedOperationException(); } + @Override + public TelemetryExporterBuilder setServiceClassLoader(ClassLoader serviceClassLoader) { + delegate.setServiceClassLoader(serviceClassLoader); + return this; + } + + @Override + public TelemetryExporterBuilder setExecutorService(ExecutorService executorService) { + delegate.setExecutorService(executorService); + return this; + } + @Override public TelemetryExporter build() { - requireNonNull(channelBuilder, "channel"); + Runnable shutdownCallback; + if (channelBuilder != null) { + try { + setSslContext(channelBuilder, tlsConfigHelper); + } catch (SSLException e) { + throw new IllegalStateException(e); + } - try { - setSslContext(channelBuilder, tlsConfigHelper); - } catch (SSLException e) { - throw new IllegalStateException(e); + ManagedChannel channel = channelBuilder.build(); + delegate.setChannel(channel); + shutdownCallback = channel::shutdownNow; + } else { + shutdownCallback = () -> {}; } - ManagedChannel channel = channelBuilder.build(); - delegate.setChannel(channel); TelemetryExporter delegateExporter = delegate.build(); return new TelemetryExporter() { @Override @@ -162,7 +203,7 @@ public CompletableResultCode export(Collection items) { @Override public CompletableResultCode shutdown() { - channel.shutdownNow(); + shutdownCallback.run(); return delegateExporter.shutdown(); } }; diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/TelemetryExporterBuilder.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/TelemetryExporterBuilder.java index 95b62e88a28..f8cf7ef4567 100644 --- a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/TelemetryExporterBuilder.java +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/TelemetryExporterBuilder.java @@ -5,16 +5,20 @@ package io.opentelemetry.exporter.otlp.testing.internal; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporterBuilder; import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporterBuilder; import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import io.opentelemetry.sdk.logs.data.LogRecordData; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.trace.data.SpanData; import java.time.Duration; +import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; @@ -38,11 +42,15 @@ static TelemetryExporterBuilder wrap(OtlpGrpcLogRecordExporterBui TelemetryExporterBuilder setTimeout(Duration timeout); + TelemetryExporterBuilder setConnectTimeout(long timeout, TimeUnit unit); + + TelemetryExporterBuilder setConnectTimeout(Duration timeout); + TelemetryExporterBuilder setCompression(String compression); TelemetryExporterBuilder addHeader(String key, String value); - TelemetryExporterBuilder setAuthenticator(Authenticator authenticator); + TelemetryExporterBuilder setHeaders(Supplier> headerSupplier); TelemetryExporterBuilder setTrustedCertificates(byte[] certificates); @@ -50,9 +58,15 @@ static TelemetryExporterBuilder 
wrap(OtlpGrpcLogRecordExporterBui TelemetryExporterBuilder setSslContext(SSLContext sslContext, X509TrustManager trustManager); - TelemetryExporterBuilder setRetryPolicy(RetryPolicy retryPolicy); + TelemetryExporterBuilder setRetryPolicy(@Nullable RetryPolicy retryPolicy); + + TelemetryExporterBuilder setProxyOptions(ProxyOptions proxyOptions); TelemetryExporterBuilder setChannel(Object channel); + TelemetryExporterBuilder setServiceClassLoader(ClassLoader serviceClassLoader); + + TelemetryExporterBuilder setExecutorService(ExecutorService executorService); + TelemetryExporter build(); } diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/compressor/Base64Compressor.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/compressor/Base64Compressor.java new file mode 100644 index 00000000000..b0b3121d635 --- /dev/null +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/compressor/Base64Compressor.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.testing.internal.compressor; + +import io.opentelemetry.exporter.internal.compression.Compressor; +import java.io.OutputStream; +import java.util.Base64; + +/** + * This exists to test the compressor SPI mechanism but does not actually compress data in any + * useful way. + */ +public class Base64Compressor implements Compressor { + + private static final Base64Compressor INSTANCE = new Base64Compressor(); + + private Base64Compressor() {} + + public static Base64Compressor getInstance() { + return INSTANCE; + } + + @Override + public String getEncoding() { + return "base64"; + } + + @Override + public OutputStream compress(OutputStream outputStream) { + return Base64.getEncoder().wrap(outputStream); + } +} diff --git a/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/compressor/Base64CompressorProvider.java b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/compressor/Base64CompressorProvider.java new file mode 100644 index 00000000000..8d4b4a6cdbc --- /dev/null +++ b/exporters/otlp/testing-internal/src/main/java/io/opentelemetry/exporter/otlp/testing/internal/compressor/Base64CompressorProvider.java @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.otlp.testing.internal.compressor; + +import io.opentelemetry.exporter.internal.compression.Compressor; +import io.opentelemetry.exporter.internal.compression.CompressorProvider; + +public class Base64CompressorProvider implements CompressorProvider { + + @Override + public Compressor getInstance() { + return Base64Compressor.getInstance(); + } +} diff --git a/exporters/otlp/testing-internal/src/main/resources/META-INF/services/io.opentelemetry.exporter.internal.compression.CompressorProvider b/exporters/otlp/testing-internal/src/main/resources/META-INF/services/io.opentelemetry.exporter.internal.compression.CompressorProvider new file mode 100644 index 00000000000..6fac487c249 --- /dev/null +++ b/exporters/otlp/testing-internal/src/main/resources/META-INF/services/io.opentelemetry.exporter.internal.compression.CompressorProvider @@ -0,0 +1 @@ +io.opentelemetry.exporter.otlp.testing.internal.compressor.Base64CompressorProvider diff --git a/exporters/prometheus/build.gradle.kts 
b/exporters/prometheus/build.gradle.kts index d746c074bbf..30e52664419 100644 --- a/exporters/prometheus/build.gradle.kts +++ b/exporters/prometheus/build.gradle.kts @@ -1,8 +1,6 @@ plugins { id("otel.java-conventions") id("otel.publish-conventions") - - id("otel.animalsniffer-conventions") } description = "OpenTelemetry Prometheus Exporter" @@ -11,15 +9,20 @@ otelJava.moduleName.set("io.opentelemetry.exporter.prometheus") dependencies { api(project(":sdk:metrics")) + compileOnly(project(":api:incubator")) + implementation(project(":exporters:common")) implementation(project(":sdk-extensions:autoconfigure-spi")) + implementation("io.prometheus:prometheus-metrics-exporter-httpserver") - compileOnly("com.sun.net.httpserver:http") compileOnly("com.google.auto.value:auto-value-annotations") annotationProcessor("com.google.auto.value:auto-value") + testImplementation(project(":sdk:testing")) testImplementation("io.opentelemetry.proto:opentelemetry-proto") - + testImplementation("io.prometheus:prometheus-metrics-shaded-protobuf") + testImplementation("io.prometheus:prometheus-metrics-exposition-formats") + testImplementation("com.sun.net.httpserver:http") testImplementation("com.google.guava:guava") testImplementation("com.linecorp.armeria:armeria") testImplementation("com.linecorp.armeria:armeria-junit5") diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/NameSanitizer.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/NameSanitizer.java deleted file mode 100644 index 9e8c62c4ede..00000000000 --- a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/NameSanitizer.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.prometheus; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.Function; -import java.util.regex.Pattern; - -/** Sanitizes a metric or label name. 
*/ -class NameSanitizer implements Function { - - static final NameSanitizer INSTANCE = new NameSanitizer(); - - static final Pattern SANITIZE_CONSECUTIVE_UNDERSCORES = Pattern.compile("[_]{2,}"); - - private static final Pattern SANITIZE_PREFIX_PATTERN = Pattern.compile("^[^a-zA-Z_:]"); - private static final Pattern SANITIZE_BODY_PATTERN = Pattern.compile("[^a-zA-Z0-9_:]"); - - private final Function delegate; - private final Map cache = new ConcurrentHashMap<>(); - - NameSanitizer() { - this(NameSanitizer::sanitizeMetricName); - } - - // visible for testing - NameSanitizer(Function delegate) { - this.delegate = delegate; - } - - @Override - public String apply(String labelName) { - return cache.computeIfAbsent(labelName, delegate); - } - - private static String sanitizeMetricName(String metricName) { - return SANITIZE_CONSECUTIVE_UNDERSCORES - .matcher( - SANITIZE_BODY_PATTERN - .matcher(SANITIZE_PREFIX_PATTERN.matcher(metricName).replaceFirst("_")) - .replaceAll("_")) - .replaceAll("_"); - } -} diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/Otel2PrometheusConverter.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/Otel2PrometheusConverter.java new file mode 100644 index 00000000000..e1cb84d9766 --- /dev/null +++ b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/Otel2PrometheusConverter.java @@ -0,0 +1,645 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.prometheus; + +import static io.prometheus.metrics.model.snapshots.PrometheusNaming.sanitizeLabelName; +import static io.prometheus.metrics.model.snapshots.PrometheusNaming.sanitizeMetricName; +import static java.util.Objects.requireNonNull; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ThrottlingLogger; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.metrics.data.DoublePointData; +import io.opentelemetry.sdk.metrics.data.ExemplarData; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramData; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData; +import io.opentelemetry.sdk.metrics.data.HistogramData; +import io.opentelemetry.sdk.metrics.data.HistogramPointData; +import io.opentelemetry.sdk.metrics.data.LongExemplarData; +import io.opentelemetry.sdk.metrics.data.LongPointData; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.SumData; +import io.opentelemetry.sdk.metrics.data.SummaryPointData; +import io.opentelemetry.sdk.metrics.data.ValueAtQuantile; +import io.opentelemetry.sdk.resources.Resource; +import io.prometheus.metrics.model.snapshots.ClassicHistogramBuckets; +import io.prometheus.metrics.model.snapshots.CounterSnapshot; +import io.prometheus.metrics.model.snapshots.CounterSnapshot.CounterDataPointSnapshot; +import io.prometheus.metrics.model.snapshots.Exemplar; +import io.prometheus.metrics.model.snapshots.Exemplars; +import io.prometheus.metrics.model.snapshots.GaugeSnapshot; +import io.prometheus.metrics.model.snapshots.GaugeSnapshot.GaugeDataPointSnapshot; +import 
io.prometheus.metrics.model.snapshots.HistogramSnapshot; +import io.prometheus.metrics.model.snapshots.HistogramSnapshot.HistogramDataPointSnapshot; +import io.prometheus.metrics.model.snapshots.InfoSnapshot; +import io.prometheus.metrics.model.snapshots.InfoSnapshot.InfoDataPointSnapshot; +import io.prometheus.metrics.model.snapshots.Labels; +import io.prometheus.metrics.model.snapshots.MetricMetadata; +import io.prometheus.metrics.model.snapshots.MetricSnapshot; +import io.prometheus.metrics.model.snapshots.MetricSnapshots; +import io.prometheus.metrics.model.snapshots.NativeHistogramBuckets; +import io.prometheus.metrics.model.snapshots.Quantile; +import io.prometheus.metrics.model.snapshots.Quantiles; +import io.prometheus.metrics.model.snapshots.SummarySnapshot; +import io.prometheus.metrics.model.snapshots.SummarySnapshot.SummaryDataPointSnapshot; +import io.prometheus.metrics.model.snapshots.Unit; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; +import javax.annotation.Nullable; + +/** Convert OpenTelemetry {@link MetricData} to Prometheus {@link MetricSnapshots}. */ +final class Otel2PrometheusConverter { + + private static final Logger LOGGER = Logger.getLogger(Otel2PrometheusConverter.class.getName()); + private static final ThrottlingLogger THROTTLING_LOGGER = new ThrottlingLogger(LOGGER); + private static final String OTEL_SCOPE_NAME = "otel_scope_name"; + private static final String OTEL_SCOPE_VERSION = "otel_scope_version"; + private static final long NANOS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1); + static final int MAX_CACHE_SIZE = 10; + + private final boolean otelScopeEnabled; + @Nullable private final Predicate allowedResourceAttributesFilter; + + /** + * Used only if addResourceAttributesAsLabels is true. Once the cache reaches {@link + * #MAX_CACHE_SIZE}, it is cleared to protect against unbounded conversion over time. + */ + private final Map>> resourceAttributesToAllowedKeysCache; + + /** + * Constructor with feature flag parameter. + * + * @param otelScopeEnabled enable generation of the OpenTelemetry instrumentation scope info + * metric and labels. + * @param allowedResourceAttributesFilter if not {@code null}, resource attributes with keys + * matching this predicate will be added as labels on each exported metric + */ + Otel2PrometheusConverter( + boolean otelScopeEnabled, @Nullable Predicate allowedResourceAttributesFilter) { + this.otelScopeEnabled = otelScopeEnabled; + this.allowedResourceAttributesFilter = allowedResourceAttributesFilter; + this.resourceAttributesToAllowedKeysCache = + allowedResourceAttributesFilter != null + ? 
new ConcurrentHashMap<>() + : Collections.emptyMap(); + } + + MetricSnapshots convert(@Nullable Collection metricDataCollection) { + if (metricDataCollection == null || metricDataCollection.isEmpty()) { + return MetricSnapshots.of(); + } + Map snapshotsByName = new HashMap<>(metricDataCollection.size()); + Resource resource = null; + Set scopes = new LinkedHashSet<>(); + for (MetricData metricData : metricDataCollection) { + MetricSnapshot snapshot = convert(metricData); + if (snapshot == null) { + continue; + } + putOrMerge(snapshotsByName, snapshot); + if (resource == null) { + resource = metricData.getResource(); + } + if (otelScopeEnabled && !metricData.getInstrumentationScopeInfo().getAttributes().isEmpty()) { + scopes.add(metricData.getInstrumentationScopeInfo()); + } + } + if (resource != null) { + putOrMerge(snapshotsByName, makeTargetInfo(resource)); + } + if (otelScopeEnabled && !scopes.isEmpty()) { + putOrMerge(snapshotsByName, makeScopeInfo(scopes)); + } + return new MetricSnapshots(snapshotsByName.values()); + } + + @Nullable + private MetricSnapshot convert(MetricData metricData) { + + // Note that AggregationTemporality.DELTA should never happen + // because PrometheusMetricReader#getAggregationTemporality returns CUMULATIVE. + + MetricMetadata metadata = convertMetadata(metricData); + InstrumentationScopeInfo scope = metricData.getInstrumentationScopeInfo(); + switch (metricData.getType()) { + case LONG_GAUGE: + return convertLongGauge( + metadata, scope, metricData.getLongGaugeData().getPoints(), metricData.getResource()); + case DOUBLE_GAUGE: + return convertDoubleGauge( + metadata, scope, metricData.getDoubleGaugeData().getPoints(), metricData.getResource()); + case LONG_SUM: + SumData longSumData = metricData.getLongSumData(); + if (longSumData.getAggregationTemporality() == AggregationTemporality.DELTA) { + return null; + } else if (longSumData.isMonotonic()) { + return convertLongCounter( + metadata, scope, longSumData.getPoints(), metricData.getResource()); + } else { + return convertLongGauge( + metadata, scope, longSumData.getPoints(), metricData.getResource()); + } + case DOUBLE_SUM: + SumData doubleSumData = metricData.getDoubleSumData(); + if (doubleSumData.getAggregationTemporality() == AggregationTemporality.DELTA) { + return null; + } else if (doubleSumData.isMonotonic()) { + return convertDoubleCounter( + metadata, scope, doubleSumData.getPoints(), metricData.getResource()); + } else { + return convertDoubleGauge( + metadata, scope, doubleSumData.getPoints(), metricData.getResource()); + } + case HISTOGRAM: + HistogramData histogramData = metricData.getHistogramData(); + if (histogramData.getAggregationTemporality() == AggregationTemporality.DELTA) { + return null; + } else { + return convertHistogram( + metadata, scope, histogramData.getPoints(), metricData.getResource()); + } + case EXPONENTIAL_HISTOGRAM: + ExponentialHistogramData exponentialHistogramData = + metricData.getExponentialHistogramData(); + if (exponentialHistogramData.getAggregationTemporality() == AggregationTemporality.DELTA) { + return null; + } else { + return convertExponentialHistogram( + metadata, scope, exponentialHistogramData.getPoints(), metricData.getResource()); + } + case SUMMARY: + return convertSummary( + metadata, scope, metricData.getSummaryData().getPoints(), metricData.getResource()); + } + return null; + } + + private GaugeSnapshot convertLongGauge( + MetricMetadata metadata, + InstrumentationScopeInfo scope, + Collection dataPoints, + Resource resource) { + List data = 
new ArrayList<>(dataPoints.size()); + for (LongPointData longData : dataPoints) { + data.add( + new GaugeDataPointSnapshot( + (double) longData.getValue(), + convertAttributes(resource, scope, longData.getAttributes()), + convertLongExemplar(longData.getExemplars()))); + } + return new GaugeSnapshot(metadata, data); + } + + private CounterSnapshot convertLongCounter( + MetricMetadata metadata, + InstrumentationScopeInfo scope, + Collection dataPoints, + Resource resource) { + List data = new ArrayList<>(dataPoints.size()); + for (LongPointData longData : dataPoints) { + data.add( + new CounterDataPointSnapshot( + (double) longData.getValue(), + convertAttributes(resource, scope, longData.getAttributes()), + convertLongExemplar(longData.getExemplars()), + longData.getStartEpochNanos() / NANOS_PER_MILLISECOND)); + } + return new CounterSnapshot(metadata, data); + } + + private GaugeSnapshot convertDoubleGauge( + MetricMetadata metadata, + InstrumentationScopeInfo scope, + Collection dataPoints, + Resource resource) { + List data = new ArrayList<>(dataPoints.size()); + for (DoublePointData doubleData : dataPoints) { + data.add( + new GaugeDataPointSnapshot( + doubleData.getValue(), + convertAttributes(resource, scope, doubleData.getAttributes()), + convertDoubleExemplar(doubleData.getExemplars()))); + } + return new GaugeSnapshot(metadata, data); + } + + private CounterSnapshot convertDoubleCounter( + MetricMetadata metadata, + InstrumentationScopeInfo scope, + Collection dataPoints, + Resource resource) { + List data = new ArrayList<>(dataPoints.size()); + for (DoublePointData doubleData : dataPoints) { + data.add( + new CounterDataPointSnapshot( + doubleData.getValue(), + convertAttributes(resource, scope, doubleData.getAttributes()), + convertDoubleExemplar(doubleData.getExemplars()), + doubleData.getStartEpochNanos() / NANOS_PER_MILLISECOND)); + } + return new CounterSnapshot(metadata, data); + } + + private HistogramSnapshot convertHistogram( + MetricMetadata metadata, + InstrumentationScopeInfo scope, + Collection dataPoints, + Resource resource) { + List data = new ArrayList<>(dataPoints.size()); + for (HistogramPointData histogramData : dataPoints) { + List boundaries = new ArrayList<>(histogramData.getBoundaries().size() + 1); + boundaries.addAll(histogramData.getBoundaries()); + boundaries.add(Double.POSITIVE_INFINITY); + data.add( + new HistogramDataPointSnapshot( + ClassicHistogramBuckets.of(boundaries, histogramData.getCounts()), + histogramData.getSum(), + convertAttributes(resource, scope, histogramData.getAttributes()), + convertDoubleExemplars(histogramData.getExemplars()), + histogramData.getStartEpochNanos() / NANOS_PER_MILLISECOND)); + } + return new HistogramSnapshot(metadata, data); + } + + @Nullable + private HistogramSnapshot convertExponentialHistogram( + MetricMetadata metadata, + InstrumentationScopeInfo scope, + Collection dataPoints, + Resource resource) { + List data = new ArrayList<>(dataPoints.size()); + for (ExponentialHistogramPointData histogramData : dataPoints) { + int scale = histogramData.getScale(); + if (scale < -4) { + THROTTLING_LOGGER.log( + Level.WARNING, + "Dropping histogram " + + metadata.getName() + + " with attributes " + + histogramData.getAttributes() + + " because it has scale < -4 which is unsupported in Prometheus"); + return null; + } + // Scale > 8 are not supported in Prometheus. Histograms with scale > 8 are scaled down to 8. + int scaleDown = scale > 8 ? 
scale - 8 : 0; + data.add( + new HistogramDataPointSnapshot( + scale - scaleDown, + histogramData.getZeroCount(), + 0L, + convertExponentialHistogramBuckets(histogramData.getPositiveBuckets(), scaleDown), + convertExponentialHistogramBuckets(histogramData.getNegativeBuckets(), scaleDown), + histogramData.getSum(), + convertAttributes(resource, scope, histogramData.getAttributes()), + convertDoubleExemplars(histogramData.getExemplars()), + histogramData.getStartEpochNanos() / NANOS_PER_MILLISECOND)); + } + return new HistogramSnapshot(metadata, data); + } + + private static NativeHistogramBuckets convertExponentialHistogramBuckets( + ExponentialHistogramBuckets buckets, int scaleDown) { + if (buckets.getBucketCounts().isEmpty()) { + return NativeHistogramBuckets.EMPTY; + } + List otelCounts = buckets.getBucketCounts(); + List indexes = new ArrayList<>(otelCounts.size()); + List counts = new ArrayList<>(otelCounts.size()); + int previousIndex = (buckets.getOffset() >> scaleDown) + 1; + long count = 0; + for (int i = 0; i < otelCounts.size(); i++) { + int index = ((buckets.getOffset() + i) >> scaleDown) + 1; + if (index > previousIndex) { + indexes.add(previousIndex); + counts.add(count); + previousIndex = index; + count = 0; + } + count += otelCounts.get(i); + } + indexes.add(previousIndex); + counts.add(count); + return NativeHistogramBuckets.of(indexes, counts); + } + + private SummarySnapshot convertSummary( + MetricMetadata metadata, + InstrumentationScopeInfo scope, + Collection dataPoints, + Resource resource) { + List data = new ArrayList<>(dataPoints.size()); + for (SummaryPointData summaryData : dataPoints) { + data.add( + new SummaryDataPointSnapshot( + summaryData.getCount(), + summaryData.getSum(), + convertQuantiles(summaryData.getValues()), + convertAttributes(resource, scope, summaryData.getAttributes()), + Exemplars.EMPTY, // Exemplars for Summaries not implemented yet. + summaryData.getStartEpochNanos() / NANOS_PER_MILLISECOND)); + } + return new SummarySnapshot(metadata, data); + } + + private static Quantiles convertQuantiles(List values) { + List result = new ArrayList<>(values.size()); + for (ValueAtQuantile value : values) { + result.add(new Quantile(value.getQuantile(), value.getValue())); + } + return Quantiles.of(result); + } + + @Nullable + private Exemplar convertLongExemplar(List exemplars) { + if (exemplars.isEmpty()) { + return null; + } else { + LongExemplarData exemplar = exemplars.get(0); + return convertExemplar((double) exemplar.getValue(), exemplar); + } + } + + /** Converts the first exemplar in the list if available, else returns {#code null}. */ + @Nullable + private Exemplar convertDoubleExemplar(List exemplars) { + if (exemplars.isEmpty()) { + return null; + } else { + DoubleExemplarData exemplar = exemplars.get(0); + return convertExemplar(exemplar.getValue(), exemplar); + } + } + + /** Converts the first exemplar in the list if available, else returns {#code null}. 
*/ + private Exemplars convertDoubleExemplars(List exemplars) { + List result = new ArrayList<>(exemplars.size()); + for (DoubleExemplarData exemplar : exemplars) { + result.add(convertExemplar(exemplar.getValue(), exemplar)); + } + return Exemplars.of(result); + } + + private Exemplar convertExemplar(double value, ExemplarData exemplar) { + SpanContext spanContext = exemplar.getSpanContext(); + if (spanContext.isValid()) { + return new Exemplar( + value, + convertAttributes( + null, // resource attributes are only copied for point's attributes + null, // scope attributes are only needed for point's attributes + exemplar.getFilteredAttributes(), + "trace_id", + spanContext.getTraceId(), + "span_id", + spanContext.getSpanId()), + exemplar.getEpochNanos() / NANOS_PER_MILLISECOND); + } else { + return new Exemplar( + value, + convertAttributes( + null, // resource attributes are only copied for point's attributes + null, // scope attributes are only needed for point's attributes + exemplar.getFilteredAttributes()), + exemplar.getEpochNanos() / NANOS_PER_MILLISECOND); + } + } + + private InfoSnapshot makeTargetInfo(Resource resource) { + return new InfoSnapshot( + new MetricMetadata("target"), + Collections.singletonList( + new InfoDataPointSnapshot( + convertAttributes( + null, // resource attributes are only copied for point's attributes + null, // scope attributes are only needed for point's attributes + resource.getAttributes())))); + } + + private InfoSnapshot makeScopeInfo(Set scopes) { + List prometheusScopeInfos = new ArrayList<>(scopes.size()); + for (InstrumentationScopeInfo scope : scopes) { + prometheusScopeInfos.add( + new InfoDataPointSnapshot( + convertAttributes( + null, // resource attributes are only copied for point's attributes + scope, + scope.getAttributes()))); + } + return new InfoSnapshot(new MetricMetadata("otel_scope"), prometheusScopeInfos); + } + + /** + * Convert OpenTelemetry attributes to Prometheus labels. + * + * @param resource optional resource (attributes) to be converted. + * @param scope will be converted to {@code otel_scope_*} labels if {@code otelScopeEnabled} is + * {@code true}. + * @param attributes the attributes to be converted. + * @param additionalAttributes optional list of key/value pairs, may be empty. + */ + @SuppressWarnings({"rawtypes", "unchecked"}) + private Labels convertAttributes( + @Nullable Resource resource, + @Nullable InstrumentationScopeInfo scope, + Attributes attributes, + String... additionalAttributes) { + + List> allowedAttributeKeys = + allowedResourceAttributesFilter != null + ? 
filterAllowedResourceAttributeKeys(resource) + : Collections.emptyList(); + + Map labelNameToValue = new HashMap<>(); + attributes.forEach( + (key, value) -> labelNameToValue.put(sanitizeLabelName(key.getKey()), value.toString())); + + for (int i = 0; i < additionalAttributes.length; i += 2) { + labelNameToValue.putIfAbsent( + requireNonNull(additionalAttributes[i]), additionalAttributes[i + 1]); + } + + if (otelScopeEnabled && scope != null) { + labelNameToValue.putIfAbsent(OTEL_SCOPE_NAME, scope.getName()); + if (scope.getVersion() != null) { + labelNameToValue.putIfAbsent(OTEL_SCOPE_VERSION, scope.getVersion()); + } + } + + if (resource != null) { + Attributes resourceAttributes = resource.getAttributes(); + for (AttributeKey attributeKey : allowedAttributeKeys) { + Object attributeValue = resourceAttributes.get(attributeKey); + if (attributeValue != null) { + labelNameToValue.putIfAbsent( + sanitizeLabelName(attributeKey.getKey()), attributeValue.toString()); + } + } + } + + String[] names = new String[labelNameToValue.size()]; + String[] values = new String[labelNameToValue.size()]; + int[] pos = new int[] {0}; + labelNameToValue.forEach( + (name, value) -> { + names[pos[0]] = name; + values[pos[0]] = value; + pos[0] += 1; + }); + + return Labels.of(names, values); + } + + private List> filterAllowedResourceAttributeKeys(@Nullable Resource resource) { + requireNonNull( + allowedResourceAttributesFilter, + "This method should only be called when allowedResourceAttributesFilter is not null."); + if (resource == null) { + return Collections.emptyList(); + } + + List> allowedAttributeKeys = + resourceAttributesToAllowedKeysCache.computeIfAbsent( + resource.getAttributes(), + resourceAttributes -> + resourceAttributes.asMap().keySet().stream() + .filter(o -> allowedResourceAttributesFilter.test(o.getKey())) + .collect(Collectors.toList())); + + if (resourceAttributesToAllowedKeysCache.size() > MAX_CACHE_SIZE) { + resourceAttributesToAllowedKeysCache.clear(); + } + return allowedAttributeKeys; + } + + private static MetricMetadata convertMetadata(MetricData metricData) { + String name = sanitizeMetricName(metricData.getName()); + String help = metricData.getDescription(); + Unit unit = PrometheusUnitsHelper.convertUnit(metricData.getUnit()); + if (unit != null && !name.endsWith(unit.toString())) { + name = name + "_" + unit; + } + // Repeated __ are not allowed according to spec, although this is allowed in prometheus + while (name.contains("__")) { + name = name.replace("__", "_"); + } + + return new MetricMetadata(name, help, unit); + } + + private static void putOrMerge( + Map snapshotsByName, MetricSnapshot snapshot) { + String name = snapshot.getMetadata().getPrometheusName(); + if (snapshotsByName.containsKey(name)) { + MetricSnapshot merged = merge(snapshotsByName.get(name), snapshot); + if (merged != null) { + snapshotsByName.put(name, merged); + } + } else { + snapshotsByName.put(name, snapshot); + } + } + + /** + * OpenTelemetry may use the same metric name multiple times but in different instrumentation + * scopes. In that case, we try to merge the metrics. They will have different {@code + * otel_scope_name} attributes. However, merging is only possible if the metrics have the same + * type. If the type differs, we log a message and drop one of them. 
+ */ + @Nullable + private static MetricSnapshot merge(MetricSnapshot a, MetricSnapshot b) { + MetricMetadata metadata = mergeMetadata(a.getMetadata(), b.getMetadata()); + if (metadata == null) { + return null; + } + int numberOfDataPoints = a.getDataPoints().size() + b.getDataPoints().size(); + if (a instanceof GaugeSnapshot && b instanceof GaugeSnapshot) { + List dataPoints = new ArrayList<>(numberOfDataPoints); + dataPoints.addAll(((GaugeSnapshot) a).getDataPoints()); + dataPoints.addAll(((GaugeSnapshot) b).getDataPoints()); + return new GaugeSnapshot(metadata, dataPoints); + } else if (a instanceof CounterSnapshot && b instanceof CounterSnapshot) { + List dataPoints = new ArrayList<>(numberOfDataPoints); + dataPoints.addAll(((CounterSnapshot) a).getDataPoints()); + dataPoints.addAll(((CounterSnapshot) b).getDataPoints()); + return new CounterSnapshot(metadata, dataPoints); + } else if (a instanceof HistogramSnapshot && b instanceof HistogramSnapshot) { + List dataPoints = new ArrayList<>(numberOfDataPoints); + dataPoints.addAll(((HistogramSnapshot) a).getDataPoints()); + dataPoints.addAll(((HistogramSnapshot) b).getDataPoints()); + return new HistogramSnapshot(metadata, dataPoints); + } else if (a instanceof SummarySnapshot && b instanceof SummarySnapshot) { + List dataPoints = new ArrayList<>(numberOfDataPoints); + dataPoints.addAll(((SummarySnapshot) a).getDataPoints()); + dataPoints.addAll(((SummarySnapshot) b).getDataPoints()); + return new SummarySnapshot(metadata, dataPoints); + } else if (a instanceof InfoSnapshot && b instanceof InfoSnapshot) { + List dataPoints = new ArrayList<>(numberOfDataPoints); + dataPoints.addAll(((InfoSnapshot) a).getDataPoints()); + dataPoints.addAll(((InfoSnapshot) b).getDataPoints()); + return new InfoSnapshot(metadata, dataPoints); + } else { + THROTTLING_LOGGER.log( + Level.WARNING, + "Conflicting metric name " + + a.getMetadata().getPrometheusName() + + ": Found one metric with type " + + typeString(a) + + " and one of type " + + typeString(b) + + ". Dropping the one with type " + + typeString(b) + + "."); + return null; + } + } + + @Nullable + private static MetricMetadata mergeMetadata(MetricMetadata a, MetricMetadata b) { + String name = a.getPrometheusName(); + if (a.getName().equals(b.getName())) { + name = a.getName(); + } + String help = null; + if (a.getHelp() != null && a.getHelp().equals(b.getHelp())) { + help = a.getHelp(); + } + Unit unit = a.getUnit(); + if (unit != null && !unit.equals(b.getUnit())) { + THROTTLING_LOGGER.log( + Level.WARNING, + "Conflicting metrics: Multiple metrics with name " + + name + + " but different units found. Dropping the one with unit " + + b.getUnit() + + "."); + return null; + } + return new MetricMetadata(name, help, unit); + } + + private static String typeString(MetricSnapshot snapshot) { + // Simple helper for a log message. 
+ return snapshot.getClass().getSimpleName().replace("Snapshot", "").toLowerCase(Locale.ENGLISH); + } +} diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServer.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServer.java index cd8f22b1cde..d07bfb8b3ae 100644 --- a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServer.java +++ b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServer.java @@ -10,57 +10,41 @@ package io.opentelemetry.exporter.prometheus; -import static java.util.stream.Collectors.joining; - -import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; -import com.sun.net.httpserver.HttpServer; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.internal.DaemonThreadFactory; +import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; -import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.CollectionRegistration; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.metrics.export.MetricReader; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; +import io.prometheus.metrics.exporter.httpserver.HTTPServer; +import io.prometheus.metrics.model.registry.PrometheusRegistry; import java.io.IOException; -import java.io.OutputStream; import java.io.UncheckedIOException; -import java.net.HttpURLConnection; -import java.net.InetAddress; import java.net.InetSocketAddress; -import java.net.URLDecoder; -import java.nio.charset.StandardCharsets; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; -import java.util.function.Supplier; -import java.util.logging.Level; -import java.util.logging.Logger; -import java.util.zip.GZIPOutputStream; import javax.annotation.Nullable; /** * A {@link MetricReader} that starts an HTTP server that will collect metrics and serialize to * Prometheus text format on request. 
*/ -// Very similar to -// https://github.com/prometheus/client_java/blob/master/simpleclient_httpserver/src/main/java/io/prometheus/client/exporter/HTTPServer.java public final class PrometheusHttpServer implements MetricReader { - private static final DaemonThreadFactory THREAD_FACTORY = - new DaemonThreadFactory("prometheus-http"); - private static final Logger LOGGER = Logger.getLogger(PrometheusHttpServer.class.getName()); - - private final HttpServer server; - private final ExecutorService executor; - private volatile MetricProducer metricProducer = MetricProducer.noop(); + private final PrometheusHttpServerBuilder builder; + private final HTTPServer httpServer; + private final PrometheusMetricReader prometheusMetricReader; + private final PrometheusRegistry prometheusRegistry; + private final String host; + private final MemoryMode memoryMode; + private final DefaultAggregationSelector defaultAggregationSelector; /** * Returns a new {@link PrometheusHttpServer} which can be registered to an {@link @@ -76,91 +60,93 @@ public static PrometheusHttpServerBuilder builder() { return new PrometheusHttpServerBuilder(); } - PrometheusHttpServer(String host, int port, ExecutorService executor) { + PrometheusHttpServer( + PrometheusHttpServerBuilder builder, + String host, + int port, + @Nullable ExecutorService executor, + PrometheusRegistry prometheusRegistry, + boolean otelScopeEnabled, + @Nullable Predicate allowedResourceAttributesFilter, + MemoryMode memoryMode, + @Nullable HttpHandler defaultHandler, + DefaultAggregationSelector defaultAggregationSelector) { + this.builder = builder; + this.prometheusMetricReader = + new PrometheusMetricReader(otelScopeEnabled, allowedResourceAttributesFilter); + this.host = host; + this.memoryMode = memoryMode; + this.prometheusRegistry = prometheusRegistry; + prometheusRegistry.register(prometheusMetricReader); + // When memory mode is REUSABLE_DATA, concurrent reads lead to data corruption. To prevent this, + // we configure prometheus with a single thread executor such that requests are handled + // sequentially. 
+ if (memoryMode == MemoryMode.REUSABLE_DATA) { + executor = + new ThreadPoolExecutor( + 1, + 1, + 0L, + TimeUnit.MILLISECONDS, + new LinkedBlockingQueue<>(), + new DaemonThreadFactory("prometheus-http-server")); + } try { - server = createServer(host, port); + this.httpServer = + HTTPServer.builder() + .hostname(host) + .port(port) + .executorService(executor) + .registry(prometheusRegistry) + .defaultHandler(defaultHandler) + .buildAndStart(); } catch (IOException e) { throw new UncheckedIOException("Could not create Prometheus HTTP server", e); } - MetricsHandler metricsHandler = - new MetricsHandler(() -> getMetricProducer().collectAllMetrics()); - server.createContext("/", metricsHandler); - server.createContext("/metrics", metricsHandler); - server.createContext("/-/healthy", HealthHandler.INSTANCE); - this.executor = executor; - server.setExecutor(executor); - - start(); - } - - private static HttpServer createServer(String host, int port) throws IOException { - IOException exception = null; - for (InetAddress address : InetAddress.getAllByName(host)) { - try { - return HttpServer.create(new InetSocketAddress(address, port), 3); - } catch (IOException e) { - if (exception == null) { - exception = e; - } else { - exception.addSuppressed(e); - } - } - } - assert exception != null; - throw exception; + this.defaultAggregationSelector = defaultAggregationSelector; } - private MetricProducer getMetricProducer() { - return metricProducer; + @Override + public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { + return prometheusMetricReader.getAggregationTemporality(instrumentType); } - private void start() { - // server.start must be called from a daemon thread for it to be a daemon. - if (Thread.currentThread().isDaemon()) { - server.start(); - return; - } - - Thread thread = THREAD_FACTORY.newThread(server::start); - thread.start(); - try { - thread.join(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } + @Override + public Aggregation getDefaultAggregation(InstrumentType instrumentType) { + return defaultAggregationSelector.getDefaultAggregation(instrumentType); } @Override - public void register(CollectionRegistration registration) { - this.metricProducer = MetricProducer.asMetricProducer(registration); + public MemoryMode getMemoryMode() { + return memoryMode; } @Override - public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { - return AggregationTemporality.CUMULATIVE; + public void register(CollectionRegistration registration) { + prometheusMetricReader.register(registration); } @Override public CompletableResultCode forceFlush() { - return CompletableResultCode.ofSuccess(); + return prometheusMetricReader.forceFlush(); } @Override public CompletableResultCode shutdown() { CompletableResultCode result = new CompletableResultCode(); - Thread thread = - THREAD_FACTORY.newThread( - () -> { - try { - server.stop(10); - executor.shutdownNow(); - } catch (Throwable t) { - result.fail(); - return; - } - result.succeed(); - }); - thread.start(); + Runnable shutdownFunction = + () -> { + try { + prometheusRegistry.unregister(prometheusMetricReader); + httpServer.stop(); + prometheusMetricReader.shutdown().whenComplete(result::succeed); + } catch (Throwable t) { + result.fail(); + } + }; + Thread shutdownThread = new Thread(shutdownFunction, "prometheus-httpserver-shutdown"); + shutdownThread.setDaemon(true); + shutdownThread.start(); return result; } @@ -171,112 +157,18 @@ public void close() 
{ @Override public String toString() { - return "PrometheusHttpServer{address=" + server.getAddress() + "}"; + return "PrometheusHttpServer{address=" + getAddress() + "}"; } - // Visible for testing. - InetSocketAddress getAddress() { - return server.getAddress(); - } - - private static class MetricsHandler implements HttpHandler { - - private final Set allConflictHeaderNames = - Collections.newSetFromMap(new ConcurrentHashMap<>()); - - private final Supplier> metricsSupplier; - - private MetricsHandler(Supplier> metricsSupplier) { - this.metricsSupplier = metricsSupplier; - } - - @Override - public void handle(HttpExchange exchange) throws IOException { - Collection metrics = metricsSupplier.get(); - Set requestedNames = parseQuery(exchange.getRequestURI().getRawQuery()); - Predicate filter = - requestedNames.isEmpty() ? unused -> true : requestedNames::contains; - Serializer serializer = - Serializer.create(exchange.getRequestHeaders().getFirst("Accept"), filter); - exchange.getResponseHeaders().set("Content-Type", serializer.contentType()); - - boolean compress = shouldUseCompression(exchange); - if (compress) { - exchange.getResponseHeaders().set("Content-Encoding", "gzip"); - } - - if (exchange.getRequestMethod().equals("HEAD")) { - exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, -1); - } else { - exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, 0); - OutputStream out; - if (compress) { - out = new GZIPOutputStream(exchange.getResponseBody()); - } else { - out = exchange.getResponseBody(); - } - Set conflictHeaderNames = serializer.write(metrics, out); - conflictHeaderNames.removeAll(allConflictHeaderNames); - if (conflictHeaderNames.size() > 0 && LOGGER.isLoggable(Level.WARNING)) { - LOGGER.log( - Level.WARNING, - "Metric conflict(s) detected. Multiple metrics with same name but different type: " - + conflictHeaderNames.stream().collect(joining(",", "[", "]"))); - allConflictHeaderNames.addAll(conflictHeaderNames); - } - } - exchange.close(); - } - } - - private static boolean shouldUseCompression(HttpExchange exchange) { - List encodingHeaders = exchange.getRequestHeaders().get("Accept-Encoding"); - if (encodingHeaders == null) { - return false; - } - - for (String encodingHeader : encodingHeaders) { - String[] encodings = encodingHeader.split(","); - for (String encoding : encodings) { - if (encoding.trim().equalsIgnoreCase("gzip")) { - return true; - } - } - } - return false; - } - - private static Set parseQuery(@Nullable String query) throws IOException { - if (query == null) { - return Collections.emptySet(); - } - Set names = new HashSet<>(); - String[] pairs = query.split("&"); - for (String pair : pairs) { - int idx = pair.indexOf("="); - if (idx != -1 && URLDecoder.decode(pair.substring(0, idx), "UTF-8").equals("name[]")) { - names.add(URLDecoder.decode(pair.substring(idx + 1), "UTF-8")); - } - } - return names; + /** + * Returns a new {@link PrometheusHttpServerBuilder} with the same configuration as this instance. 
+ */ + public PrometheusHttpServerBuilder toBuilder() { + return new PrometheusHttpServerBuilder(builder); } - private enum HealthHandler implements HttpHandler { - INSTANCE; - - private static final byte[] RESPONSE = "Exporter is Healthy.".getBytes(StandardCharsets.UTF_8); - private static final String CONTENT_LENGTH_VALUE = String.valueOf(RESPONSE.length); - - @Override - public void handle(HttpExchange exchange) throws IOException { - exchange.getResponseHeaders().set("Content-Length", CONTENT_LENGTH_VALUE); - if (exchange.getRequestMethod().equals("HEAD")) { - exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, -1); - } else { - exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, RESPONSE.length); - exchange.getResponseBody().write(RESPONSE); - } - exchange.close(); - } + // Visible for testing. + InetSocketAddress getAddress() { + return new InetSocketAddress(host, httpServer.getPort()); } } diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServerBuilder.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServerBuilder.java index 539f46811ed..b43f7e089e1 100644 --- a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServerBuilder.java +++ b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServerBuilder.java @@ -8,9 +8,15 @@ import static io.opentelemetry.api.internal.Utils.checkArgument; import static java.util.Objects.requireNonNull; -import io.opentelemetry.sdk.internal.DaemonThreadFactory; +import com.sun.net.httpserver.HttpHandler; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.prometheus.metrics.model.registry.PrometheusRegistry; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import java.util.function.Predicate; import javax.annotation.Nullable; /** A builder for {@link PrometheusHttpServer}. */ @@ -18,11 +24,31 @@ public final class PrometheusHttpServerBuilder { static final int DEFAULT_PORT = 9464; private static final String DEFAULT_HOST = "0.0.0.0"; + private static final MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.REUSABLE_DATA; private String host = DEFAULT_HOST; private int port = DEFAULT_PORT; - + private PrometheusRegistry prometheusRegistry = new PrometheusRegistry(); + private boolean otelScopeEnabled = true; + @Nullable private Predicate allowedResourceAttributesFilter; @Nullable private ExecutorService executor; + private MemoryMode memoryMode = DEFAULT_MEMORY_MODE; + @Nullable private HttpHandler defaultHandler; + private DefaultAggregationSelector defaultAggregationSelector = + DefaultAggregationSelector.getDefault(); + + PrometheusHttpServerBuilder() {} + + PrometheusHttpServerBuilder(PrometheusHttpServerBuilder builder) { + this.host = builder.host; + this.port = builder.port; + this.prometheusRegistry = builder.prometheusRegistry; + this.otelScopeEnabled = builder.otelScopeEnabled; + this.allowedResourceAttributesFilter = builder.allowedResourceAttributesFilter; + this.executor = builder.executor; + this.memoryMode = builder.memoryMode; + this.defaultAggregationSelector = builder.defaultAggregationSelector; + } /** Sets the host to bind to. If unset, defaults to {@value #DEFAULT_HOST}. 
*/ public PrometheusHttpServerBuilder setHost(String host) { @@ -46,21 +72,100 @@ public PrometheusHttpServerBuilder setExecutor(ExecutorService executor) { return this; } + /** Sets the {@link PrometheusRegistry} to be used for {@link PrometheusHttpServer}. */ + @SuppressWarnings("UnusedReturnValue") + public PrometheusHttpServerBuilder setPrometheusRegistry(PrometheusRegistry prometheusRegistry) { + requireNonNull(prometheusRegistry, "prometheusRegistry"); + this.prometheusRegistry = prometheusRegistry; + return this; + } + + /** Set if the {@code otel_scope_*} attributes are generated. Default is {@code true}. */ + @SuppressWarnings("UnusedReturnValue") + public PrometheusHttpServerBuilder setOtelScopeEnabled(boolean otelScopeEnabled) { + this.otelScopeEnabled = otelScopeEnabled; + return this; + } + + /** + * Set if the resource attributes should be added as labels on each exported metric. + * + *
<p>
If set, resource attributes will be added as labels on each exported metric if their key + * tests positive (true) when passed through {@code resourceAttributesFilter}. + * + * @param resourceAttributesFilter a predicate that returns true if the resource attribute should + * be added as a label on each exported metric. The predicate's input is the resource attribute + * key. + */ + public PrometheusHttpServerBuilder setAllowedResourceAttributesFilter( + Predicate<String> resourceAttributesFilter) { + this.allowedResourceAttributesFilter = requireNonNull(resourceAttributesFilter); + return this; + } + + /** + * Set the {@link MemoryMode}. + * + *
<p>
If set to {@link MemoryMode#REUSABLE_DATA}, requests are served sequentially which is + * accomplished by overriding {@link #setExecutor(ExecutorService)} to {@link + * Executors#newSingleThreadExecutor()}. + */ + public PrometheusHttpServerBuilder setMemoryMode(MemoryMode memoryMode) { + requireNonNull(memoryMode, "memoryMode"); + this.memoryMode = memoryMode; + return this; + } + + /** + * Override the default handler for serving the "/", "/**" endpoint. + * + *
<p>
This can be used to serve metrics on additional paths besides the default "/metrics". For + * example: + * <pre>{@code + * PrometheusHttpServer.builder() + * .setPrometheusRegistry(prometheusRegistry) + * .setDefaultHandler(new MetricsHandler(prometheusRegistry)) + * .build() + * }</pre> + */ + public PrometheusHttpServerBuilder setDefaultHandler(HttpHandler defaultHandler) { + requireNonNull(defaultHandler, "defaultHandler"); + this.defaultHandler = defaultHandler; + return this; + } + + /** + * Set the {@link DefaultAggregationSelector} used for {@link + * MetricExporter#getDefaultAggregation(InstrumentType)}. + * + *
<p>
If unset, defaults to {@link DefaultAggregationSelector#getDefault()}. + */ + public PrometheusHttpServerBuilder setDefaultAggregationSelector( + DefaultAggregationSelector defaultAggregationSelector) { + requireNonNull(defaultAggregationSelector, "defaultAggregationSelector"); + this.defaultAggregationSelector = defaultAggregationSelector; + return this; + } + /** * Returns a new {@link PrometheusHttpServer} with the configuration of this builder which can be * registered with a {@link io.opentelemetry.sdk.metrics.SdkMeterProvider}. */ public PrometheusHttpServer build() { - ExecutorService executorService = this.executor; - if (executorService == null) { - executorService = getDefaultExecutor(); + if (memoryMode == MemoryMode.REUSABLE_DATA && executor != null) { + throw new IllegalArgumentException( + "MemoryMode REUSEABLE_DATA cannot be used with custom executor, " + + "since data may be corrupted if reading metrics concurrently"); } - return new PrometheusHttpServer(host, port, executorService); - } - - PrometheusHttpServerBuilder() {} - - private static ExecutorService getDefaultExecutor() { - return Executors.newFixedThreadPool(5, new DaemonThreadFactory("prometheus-http")); + return new PrometheusHttpServer( + new PrometheusHttpServerBuilder(this), // copy to prevent modification + host, + port, + executor, + prometheusRegistry, + otelScopeEnabled, + allowedResourceAttributesFilter, + memoryMode, + defaultHandler, + defaultAggregationSelector); } } diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusMetricNameMapper.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusMetricNameMapper.java deleted file mode 100644 index 0cdc35ea33d..00000000000 --- a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusMetricNameMapper.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.prometheus; - -import com.google.auto.value.AutoValue; -import io.opentelemetry.api.internal.StringUtils; -import io.opentelemetry.sdk.metrics.data.MetricData; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.BiFunction; -import javax.annotation.concurrent.Immutable; - -/** A class that maps a raw metric name to Prometheus equivalent name. 
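The rebuilt PrometheusHttpServerBuilder above wires the memory mode, a Prometheus registry, a resource-attribute filter, and a default aggregation selector into the server. A minimal usage sketch, assuming only the builder API shown in this diff (the filter predicate and the second port are illustrative, not part of the change):

```java
import io.opentelemetry.exporter.prometheus.PrometheusHttpServer;
import io.opentelemetry.sdk.common.export.MemoryMode;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;

public final class PrometheusHttpServerUsageSketch {
  public static void main(String[] args) {
    // Build the server; REUSABLE_DATA is the default memory mode and serves scrapes sequentially.
    PrometheusHttpServer server =
        PrometheusHttpServer.builder()
            .setHost("0.0.0.0")
            .setPort(9464)
            .setMemoryMode(MemoryMode.REUSABLE_DATA)
            // Illustrative filter: only copy service.* resource attributes onto metric labels.
            .setAllowedResourceAttributesFilter(key -> key.startsWith("service."))
            .build();

    // PrometheusHttpServer is a MetricReader, so it registers directly with the SDK.
    SdkMeterProvider meterProvider =
        SdkMeterProvider.builder().registerMetricReader(server).build();

    // toBuilder() copies this configuration, e.g. to stand up a second endpoint on another port.
    PrometheusHttpServer second = server.toBuilder().setPort(9465).build();

    // ... record metrics via meterProvider, then shut everything down.
    second.shutdown();
    meterProvider.shutdown();
  }
}
```

Note that build() rejects a custom executor when the memory mode is REUSABLE_DATA, since reusable metric data must not be read concurrently.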
*/ -class PrometheusMetricNameMapper implements BiFunction { - private static final String TOTAL_SUFFIX = "_total"; - static final PrometheusMetricNameMapper INSTANCE = new PrometheusMetricNameMapper(); - - private final Map cache = new ConcurrentHashMap<>(); - private final BiFunction delegate; - - // private constructor - prevent external object initialization - private PrometheusMetricNameMapper() { - this(PrometheusMetricNameMapper::mapToPrometheusName); - } - - // Visible for testing - PrometheusMetricNameMapper(BiFunction delegate) { - this.delegate = delegate; - } - - @Override - public String apply(MetricData rawMetric, PrometheusType prometheusType) { - return cache.computeIfAbsent( - createKeyForCacheMapping(rawMetric, prometheusType), - metricData -> delegate.apply(rawMetric, prometheusType)); - } - - private static String mapToPrometheusName(MetricData rawMetric, PrometheusType prometheusType) { - String name = NameSanitizer.INSTANCE.apply(rawMetric.getName()); - String prometheusEquivalentUnit = - PrometheusUnitsHelper.getEquivalentPrometheusUnit(rawMetric.getUnit()); - boolean shouldAppendUnit = - !StringUtils.isNullOrEmpty(prometheusEquivalentUnit) - && !name.contains(prometheusEquivalentUnit); - // trim counter's _total suffix so the unit is placed before it. - if (prometheusType == PrometheusType.COUNTER && name.endsWith(TOTAL_SUFFIX)) { - name = name.substring(0, name.length() - TOTAL_SUFFIX.length()); - } - // append prometheus unit if not null or empty. - if (shouldAppendUnit) { - name = name + "_" + prometheusEquivalentUnit; - } - - // replace _total suffix, or add if it wasn't already present. - if (prometheusType == PrometheusType.COUNTER) { - name = name + TOTAL_SUFFIX; - } - // special case - gauge - if (rawMetric.getUnit().equals("1") - && prometheusType == PrometheusType.GAUGE - && !name.contains("ratio")) { - name = name + "_ratio"; - } - return name; - } - - /** - * Creates a suitable mapping key to be used for maintaining mapping between raw metric and its - * equivalent Prometheus name. - * - * @param metricData the metric data for which the mapping is to be created. - * @param prometheusType the prometheus type to which the metric is to be mapped. - * @return an {@link ImmutableMappingKey} that can be used as a key for mapping between metric - * data and its prometheus equivalent name. - */ - private static ImmutableMappingKey createKeyForCacheMapping( - MetricData metricData, PrometheusType prometheusType) { - return ImmutableMappingKey.create( - metricData.getName(), metricData.getUnit(), prometheusType.name()); - } - - /** - * Objects of this class acts as mapping keys for Prometheus metric mapping cache used in {@link - * PrometheusMetricNameMapper}. 
- */ - @Immutable - @AutoValue - abstract static class ImmutableMappingKey { - static ImmutableMappingKey create( - String rawMetricName, String rawMetricUnit, String prometheusType) { - return new AutoValue_PrometheusMetricNameMapper_ImmutableMappingKey( - rawMetricName, rawMetricUnit, prometheusType); - } - - abstract String rawMetricName(); - - abstract String rawMetricUnit(); - - abstract String prometheusType(); - } -} diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusMetricReader.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusMetricReader.java new file mode 100644 index 00000000000..b51c83ab6f7 --- /dev/null +++ b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusMetricReader.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.prometheus; + +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.export.CollectionRegistration; +import io.opentelemetry.sdk.metrics.export.MetricReader; +import io.prometheus.metrics.model.registry.MultiCollector; +import io.prometheus.metrics.model.snapshots.MetricSnapshots; +import java.util.function.Predicate; +import javax.annotation.Nullable; + +/** + * This is the bridge between Prometheus and OpenTelemetry. + * + *
<p>
The {@link PrometheusMetricReader} is a Prometheus {@link MultiCollector} and can be + * registered with the {@link io.prometheus.metrics.model.registry.PrometheusRegistry + * PrometheusRegistry}. It's also an OpenTelemetry {@link MetricReader} and can be registered with a + * {@link io.opentelemetry.sdk.metrics.SdkMeterProvider SdkMeterProvider}. + */ +public class PrometheusMetricReader implements MetricReader, MultiCollector { + + private volatile CollectionRegistration collectionRegistration = CollectionRegistration.noop(); + private final Otel2PrometheusConverter converter; + + // TODO: refactor to public static create or builder pattern to align with project style + /** See {@link Otel2PrometheusConverter#Otel2PrometheusConverter(boolean, Predicate)}. */ + public PrometheusMetricReader( + boolean otelScopeEnabled, @Nullable Predicate allowedResourceAttributesFilter) { + this.converter = + new Otel2PrometheusConverter(otelScopeEnabled, allowedResourceAttributesFilter); + } + + @Override + public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { + return AggregationTemporality.CUMULATIVE; + } + + @Override + public void register(CollectionRegistration registration) { + this.collectionRegistration = registration; + } + + @Override + public CompletableResultCode forceFlush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public MetricSnapshots collect() { + return converter.convert(collectionRegistration.collectAllMetrics()); + } +} diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusType.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusType.java deleted file mode 100644 index 8f55022d3b8..00000000000 --- a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusType.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.prometheus; - -import io.opentelemetry.sdk.metrics.data.DoublePointData; -import io.opentelemetry.sdk.metrics.data.LongPointData; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.data.SumData; - -// Four types we use are same in prometheus and openmetrics format -enum PrometheusType { - GAUGE("gauge"), - COUNTER("counter"), - SUMMARY("summary"), - HISTOGRAM("histogram"); - - private final String typeString; - - PrometheusType(String typeString) { - this.typeString = typeString; - } - - static PrometheusType forMetric(MetricData metric) { - switch (metric.getType()) { - case LONG_GAUGE: - case DOUBLE_GAUGE: - return GAUGE; - case LONG_SUM: - SumData longSumData = metric.getLongSumData(); - if (longSumData.isMonotonic()) { - return COUNTER; - } - return GAUGE; - case DOUBLE_SUM: - SumData doubleSumData = metric.getDoubleSumData(); - if (doubleSumData.isMonotonic()) { - return COUNTER; - } - return GAUGE; - case SUMMARY: - return SUMMARY; - case HISTOGRAM: - case EXPONENTIAL_HISTOGRAM: - return HISTOGRAM; - } - throw new IllegalArgumentException( - "Unsupported metric type, this generally indicates version misalignment " - + "among opentelemetry dependencies. 
Please make sure to use opentelemetry-bom."); - } - - String getTypeString() { - return typeString; - } -} diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusUnitsHelper.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusUnitsHelper.java index a0ca81669d9..b2a9c856992 100644 --- a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusUnitsHelper.java +++ b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/PrometheusUnitsHelper.java @@ -5,208 +5,104 @@ package io.opentelemetry.exporter.prometheus; -import static io.opentelemetry.exporter.prometheus.NameSanitizer.SANITIZE_CONSECUTIVE_UNDERSCORES; +import io.prometheus.metrics.model.snapshots.PrometheusNaming; +import io.prometheus.metrics.model.snapshots.Unit; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import javax.annotation.Nullable; -import io.opentelemetry.api.internal.StringUtils; -import java.util.regex.Pattern; +/** Convert OpenTelemetry unit names to Prometheus units. */ +class PrometheusUnitsHelper { -/** - * A utility class that contains helper function(s) to aid conversion from OTLP to Prometheus units. - * - * @see OpenMetrics - * specification for units - * @see Prometheus best practices - * for units - */ -final class PrometheusUnitsHelper { - - private static final Pattern INVALID_CHARACTERS_PATTERN = Pattern.compile("[^a-zA-Z0-9]"); - private static final Pattern CHARACTERS_BETWEEN_BRACES_PATTERN = Pattern.compile("\\{(.*?)}"); - private static final Pattern SANITIZE_LEADING_UNDERSCORES = Pattern.compile("^_+"); - private static final Pattern SANITIZE_TRAILING_UNDERSCORES = Pattern.compile("_+$"); + private static final Map pluralNames = new ConcurrentHashMap<>(); + private static final Map singularNames = new ConcurrentHashMap<>(); + private static final Map predefinedUnits = new ConcurrentHashMap<>(); - private PrometheusUnitsHelper() { - // Prevent object creation for utility classes + // See + // https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/c3b2997563106e11d39f66eec629fde25dce2bdd/pkg/translator/prometheus/normalize_name.go#L19-L19 + static { + // Time + initUnit("a", "years", "year"); + initUnit("mo", "months", "month"); + initUnit("wk", "weeks", "week"); + initUnit("d", "days", "day"); + initUnit("h", "hours", "hour"); + initUnit("min", "minutes", "minute"); + initUnit("s", "seconds", "second"); + initUnit("ms", "milliseconds", "millisecond"); + initUnit("us", "microseconds", "microsecond"); + initUnit("ns", "nanoseconds", "nanosecond"); + // Bytes + initUnit("By", "bytes", "byte"); + initUnit("KiBy", "kibibytes", "kibibyte"); + initUnit("MiBy", "mebibytes", "mebibyte"); + initUnit("GiBy", "gibibytes", "gibibyte"); + initUnit("TiBy", "tibibytes", "tibibyte"); + initUnit("KBy", "kilobytes", "kilobyte"); + initUnit("MBy", "megabytes", "megabyte"); + initUnit("GBy", "gigabytes", "gigabyte"); + initUnit("TBy", "terabytes", "terabyte"); + // SI + initUnit("m", "meters", "meter"); + initUnit("V", "volts", "volt"); + initUnit("A", "amperes", "ampere"); + initUnit("J", "joules", "joule"); + initUnit("W", "watts", "watt"); + initUnit("g", "grams", "gram"); + // Misc + initUnit("Cel", "celsius"); + initUnit("Hz", "hertz"); + initUnit("%", "percent"); + initUnit("1", "ratio"); } - /** - * A utility function that returns the equivalent Prometheus name for the provided OTLP metric - * unit. 
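For applications that already run their own Prometheus endpoint, the PrometheusMetricReader added earlier in this diff can be used without the bundled server. A hedged sketch, assuming the client_java HTTPServer from io.prometheus.metrics.exporter.httpserver (the same server class the exporter builds on) and that PrometheusRegistry accepts a MultiCollector:

```java
import io.opentelemetry.exporter.prometheus.PrometheusMetricReader;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.prometheus.metrics.exporter.httpserver.HTTPServer;
import io.prometheus.metrics.model.registry.PrometheusRegistry;
import java.io.IOException;

public final class PrometheusMetricReaderSketch {
  public static void main(String[] args) throws IOException {
    // The reader is both an OpenTelemetry MetricReader and a Prometheus MultiCollector.
    PrometheusMetricReader reader =
        new PrometheusMetricReader(
            /* otelScopeEnabled= */ true, /* allowedResourceAttributesFilter= */ null);

    SdkMeterProvider meterProvider =
        SdkMeterProvider.builder().registerMetricReader(reader).build();

    // Register the reader with an existing Prometheus registry and expose it however you like.
    PrometheusRegistry registry = new PrometheusRegistry();
    registry.register(reader);
    HTTPServer scrapeEndpoint = HTTPServer.builder().port(9464).registry(registry).buildAndStart();

    // ... later, on shutdown:
    scrapeEndpoint.stop();
    meterProvider.shutdown();
  }
}
```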
- * - * @param rawMetricUnitName The raw metric unit for which Prometheus metric unit needs to be - * computed. - * @return the computed Prometheus metric unit equivalent of the OTLP metric un - */ - static String getEquivalentPrometheusUnit(String rawMetricUnitName) { - if (StringUtils.isNullOrEmpty(rawMetricUnitName)) { - return rawMetricUnitName; - } - // Drop units specified between curly braces - String convertedMetricUnitName = removeUnitPortionInBraces(rawMetricUnitName); - // Handling for the "per" unit(s), e.g. foo/bar -> foo_per_bar - convertedMetricUnitName = convertRateExpressedToPrometheusUnit(convertedMetricUnitName); - // Converting abbreviated unit names to full names - return cleanUpString(getPrometheusUnit(convertedMetricUnitName)); - } + private PrometheusUnitsHelper() {} - /** - * This method is used to convert the units expressed as a rate via '/' symbol in their name to - * their expanded text equivalent. For instance, km/h => km_per_hour. The method operates on the - * input by splitting it in 2 parts - before and after '/' symbol and will attempt to expand any - * known unit abbreviation in both parts. Unknown abbreviations & unsupported characters will - * remain unchanged in the final output of this function. - * - * @param rateExpressedUnit The rate unit input that needs to be converted to its text equivalent. - * @return The text equivalent of unit expressed as rate. If the input does not contain '/', the - * function returns it as-is. - */ - private static String convertRateExpressedToPrometheusUnit(String rateExpressedUnit) { - if (!rateExpressedUnit.contains("/")) { - return rateExpressedUnit; - } - String[] rateEntities = rateExpressedUnit.split("/", 2); - // Only convert rate expressed units if it's a valid expression - if (rateEntities[1].equals("")) { - return rateExpressedUnit; - } - return getPrometheusUnit(rateEntities[0]) + "_per_" + getPrometheusPerUnit(rateEntities[1]); + private static void initUnit(String otelName, String pluralName) { + pluralNames.put(otelName, pluralName); + predefinedUnits.put(otelName, new Unit(pluralName)); } - /** - * This method drops all characters enclosed within '{}' (including the curly braces) by replacing - * them with an empty string. Note that this method will not produce the intended effect if there - * are nested curly braces within the outer enclosure of '{}'. - * - *
<p>
For instance, {packet{s}s} => s}. - * - * @param unit The input unit from which text within curly braces needs to be removed. - * @return The resulting unit after removing the text within '{}'. - */ - private static String removeUnitPortionInBraces(String unit) { - return CHARACTERS_BETWEEN_BRACES_PATTERN.matcher(unit).replaceAll(""); + private static void initUnit(String otelName, String pluralName, String singularName) { + initUnit(otelName, pluralName); + singularNames.put(otelName, singularName); } - /** - * Replaces all characters that are not a letter or a digit with '_' to make the resulting string - * Prometheus compliant. This method also removes leading and trailing underscores - this is done - * to keep the resulting unit similar to what is produced from the collector's implementation. - * - * @param string The string input that needs to be made Prometheus compliant. - * @return the cleaned-up Prometheus compliant string. - */ - private static String cleanUpString(String string) { - return SANITIZE_LEADING_UNDERSCORES - .matcher( - SANITIZE_TRAILING_UNDERSCORES - .matcher( - SANITIZE_CONSECUTIVE_UNDERSCORES - .matcher(INVALID_CHARACTERS_PATTERN.matcher(string).replaceAll("_")) - .replaceAll("_")) - .replaceAll("")) - .replaceAll(""); - } - - /** - * This method retrieves the expanded Prometheus unit name for known abbreviations. OTLP metrics - * use the c/s notation as specified at UCUM. The list of - * mappings is adopted from OpenTelemetry - * Collector Contrib. - * - * @param unitAbbreviation The unit that name that needs to be expanded/converted to Prometheus - * units. - * @return The expanded/converted unit name if known, otherwise returns the input unit name as-is. - */ - private static String getPrometheusUnit(String unitAbbreviation) { - switch (unitAbbreviation) { - // Time - case "d": - return "days"; - case "h": - return "hours"; - case "min": - return "minutes"; - case "s": - return "seconds"; - case "ms": - return "milliseconds"; - case "us": - return "microseconds"; - case "ns": - return "nanoseconds"; - // Bytes - case "By": - return "bytes"; - case "KiBy": - return "kibibytes"; - case "MiBy": - return "mebibytes"; - case "GiBy": - return "gibibytes"; - case "TiBy": - return "tibibytes"; - case "KBy": - return "kilobytes"; - case "MBy": - return "megabytes"; - case "GBy": - return "gigabytes"; - case "TBy": - return "terabytes"; - // SI - case "m": - return "meters"; - case "V": - return "volts"; - case "A": - return "amperes"; - case "J": - return "joules"; - case "W": - return "watts"; - case "g": - return "grams"; - // Misc - case "Cel": - return "celsius"; - case "Hz": - return "hertz"; - case "1": - return ""; - case "%": - return "percent"; - default: - return unitAbbreviation; + @Nullable + static Unit convertUnit(String otelUnit) { + if (otelUnit.isEmpty()) { + return null; + } + if (otelUnit.contains("{")) { + otelUnit = otelUnit.replaceAll("\\{[^}]*}", "").trim(); + if (otelUnit.isEmpty() || otelUnit.equals("/")) { + return null; + } + } + if (predefinedUnits.containsKey(otelUnit)) { + return predefinedUnits.get(otelUnit); + } + if (otelUnit.contains("/")) { + String[] parts = otelUnit.split("/", 2); + String part1 = pluralNames.getOrDefault(parts[0], parts[0]).trim(); + String part2 = singularNames.getOrDefault(parts[1], parts[1]).trim(); + if (part1.isEmpty()) { + return unitOrNull("per_" + part2); + } else { + return unitOrNull(part1 + "_per_" + part2); + } } + return unitOrNull(otelUnit); } - /** - * This method retrieves the expanded Prometheus 
unit name to be used with "per" units for known - * units. For example: s => per second (singular) - * - * @param perUnitAbbreviation The unit abbreviation used in a 'per' unit. - * @return The expanded unit equivalent to be used in 'per' unit if the input is a known unit, - * otherwise returns the input as-is. - */ - private static String getPrometheusPerUnit(String perUnitAbbreviation) { - switch (perUnitAbbreviation) { - case "s": - return "second"; - case "m": - return "minute"; - case "h": - return "hour"; - case "d": - return "day"; - case "w": - return "week"; - case "mo": - return "month"; - case "y": - return "year"; - default: - return perUnitAbbreviation; + @Nullable + private static Unit unitOrNull(String name) { + try { + return new Unit(PrometheusNaming.sanitizeUnitName(name)); + } catch (IllegalArgumentException e) { + // This happens if the name cannot be converted to a valid Prometheus unit name, + // for example if name is "total". + return null; } } } diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/Serializer.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/Serializer.java deleted file mode 100644 index 7a074d0663b..00000000000 --- a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/Serializer.java +++ /dev/null @@ -1,661 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -// Includes work from: - -/* - * Prometheus instrumentation library for JVM applications - * Copyright 2012-2015 The Prometheus Authors - * - * This product includes software developed at - * Boxever Ltd. (http://www.boxever.com/). - * - * This product includes software developed at - * SoundCloud Ltd. (http://soundcloud.com/). - * - * This product includes software developed as part of the - * Ocelli project by Netflix Inc. (https://github.com/Netflix/ocelli/). 
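The new convertUnit logic above can be spot-checked with a small in-package test. This is a sketch, not part of the PR: it assumes the test lives in io.opentelemetry.exporter.prometheus (convertUnit is package-private) and that client_java's Unit reports its name via toString():

```java
package io.opentelemetry.exporter.prometheus;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.jupiter.api.Test;

// Sketch only: spot-checks a few conversions implied by the initUnit table and convertUnit logic.
class PrometheusUnitsHelperSketchTest {

  @Test
  void convertsCommonUnits() {
    // Predefined units map directly to their plural form.
    assertThat(PrometheusUnitsHelper.convertUnit("ms")).hasToString("milliseconds");
    // Rates use the plural numerator and the singular denominator.
    assertThat(PrometheusUnitsHelper.convertUnit("By/s")).hasToString("bytes_per_second");
    // The dimensionless unit "1" becomes "ratio".
    assertThat(PrometheusUnitsHelper.convertUnit("1")).hasToString("ratio");
    // Curly-brace annotations are dropped; a bare annotation yields no unit at all.
    assertThat(PrometheusUnitsHelper.convertUnit("{packets}")).isNull();
  }
}
```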
- */ - -package io.opentelemetry.exporter.prometheus; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.metrics.data.AggregationTemporality; -import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; -import io.opentelemetry.sdk.metrics.data.DoublePointData; -import io.opentelemetry.sdk.metrics.data.ExemplarData; -import io.opentelemetry.sdk.metrics.data.HistogramPointData; -import io.opentelemetry.sdk.metrics.data.LongExemplarData; -import io.opentelemetry.sdk.metrics.data.LongPointData; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.data.MetricDataType; -import io.opentelemetry.sdk.metrics.data.PointData; -import io.opentelemetry.sdk.metrics.data.SummaryPointData; -import io.opentelemetry.sdk.metrics.data.ValueAtQuantile; -import io.opentelemetry.sdk.resources.Resource; -import java.io.BufferedWriter; -import java.io.IOException; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.io.UncheckedIOException; -import java.io.Writer; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.function.BiConsumer; -import java.util.function.Predicate; -import javax.annotation.Nullable; - -/** Serializes metrics into Prometheus exposition formats. */ -// Adapted from -// https://github.com/prometheus/client_java/blob/master/simpleclient_common/src/main/java/io/prometheus/client/exporter/common/TextFormat.java -abstract class Serializer { - static Serializer create(@Nullable String acceptHeader, Predicate filter) { - if (acceptHeader == null) { - return new Prometheus004Serializer(filter); - } - - for (String accepts : acceptHeader.split(",")) { - if ("application/openmetrics-text".equals(accepts.split(";")[0].trim())) { - return new OpenMetrics100Serializer(filter); - } - } - - return new Prometheus004Serializer(filter); - } - - private final Predicate metricNameFilter; - - Serializer(Predicate metricNameFilter) { - this.metricNameFilter = metricNameFilter; - } - - abstract String contentType(); - - abstract String headerName(String name, MetricData rawMetric, PrometheusType type); - - abstract void writeHelp(Writer writer, String description) throws IOException; - - abstract void writeTimestamp(Writer writer, long timestampNanos) throws IOException; - - abstract void writeExemplar( - Writer writer, - Collection exemplars, - double minExemplar, - double maxExemplar) - throws IOException; - - abstract void writeEof(Writer writer) throws IOException; - - final Set write(Collection metrics, OutputStream output) throws IOException { - Set conflictMetricNames = new HashSet<>(); - Map> metricsByName = new LinkedHashMap<>(); - Set scopes = new LinkedHashSet<>(); - // Iterate through metrics, filtering and grouping by headerName - for (MetricData metric : metrics) { - // Not supported in specification yet. 
- if (metric.getType() == MetricDataType.EXPONENTIAL_HISTOGRAM) { - continue; - } - // PrometheusHttpServer#getAggregationTemporality specifies cumulative temporality for - // all instruments, but non-SDK MetricProducers may not conform. We drop delta - // temporality metrics to avoid the complexity of stateful transformation to cumulative. - if (isDeltaTemporality(metric)) { - continue; - } - PrometheusType prometheusType = PrometheusType.forMetric(metric); - String metricName = PrometheusMetricNameMapper.INSTANCE.apply(metric, prometheusType); - // Skip metrics which do not pass metricNameFilter - if (!metricNameFilter.test(metricName)) { - continue; - } - List metricsWithHeaderName = - metricsByName.computeIfAbsent(metricName, unused -> new ArrayList<>()); - // Skip metrics with the same name but different type - if (metricsWithHeaderName.size() > 0 - && prometheusType != PrometheusType.forMetric(metricsWithHeaderName.get(0))) { - conflictMetricNames.add(metricName); - continue; - } - - metricsWithHeaderName.add(metric); - scopes.add(metric.getInstrumentationScopeInfo()); - } - - Optional optResource = metrics.stream().findFirst().map(MetricData::getResource); - try (Writer writer = - new BufferedWriter(new OutputStreamWriter(output, StandardCharsets.UTF_8))) { - if (optResource.isPresent()) { - writeResource(optResource.get(), writer); - } - for (InstrumentationScopeInfo scope : scopes) { - writeScopeInfo(scope, writer); - } - for (Map.Entry> entry : metricsByName.entrySet()) { - write(entry.getValue(), entry.getKey(), writer); - } - writeEof(writer); - } - return conflictMetricNames; - } - - private void write(List metrics, String metricName, Writer writer) - throws IOException { - // Write header based on first metric - MetricData first = metrics.get(0); - PrometheusType type = PrometheusType.forMetric(first); - String headerName = headerName(metricName, first, type); - String description = metrics.get(0).getDescription(); - - writer.write("# TYPE "); - writer.write(headerName); - writer.write(' '); - writer.write(type.getTypeString()); - writer.write('\n'); - - writer.write("# HELP "); - writer.write(headerName); - writer.write(' '); - writeHelp(writer, description); - writer.write('\n'); - - // Then write the metrics. 
- for (MetricData metric : metrics) { - write(metric, metricName, writer); - } - } - - private void write(MetricData metric, String metricName, Writer writer) throws IOException { - for (PointData point : getPoints(metric)) { - switch (metric.getType()) { - case DOUBLE_SUM: - case DOUBLE_GAUGE: - writePoint( - writer, - metric.getInstrumentationScopeInfo(), - metricName, - ((DoublePointData) point).getValue(), - point.getAttributes(), - point.getEpochNanos()); - break; - case LONG_SUM: - case LONG_GAUGE: - writePoint( - writer, - metric.getInstrumentationScopeInfo(), - metricName, - (double) ((LongPointData) point).getValue(), - point.getAttributes(), - point.getEpochNanos()); - break; - case HISTOGRAM: - writeHistogram( - writer, metric.getInstrumentationScopeInfo(), metricName, (HistogramPointData) point); - break; - case SUMMARY: - writeSummary( - writer, metric.getInstrumentationScopeInfo(), metricName, (SummaryPointData) point); - break; - case EXPONENTIAL_HISTOGRAM: - throw new IllegalArgumentException("Can't happen"); - } - } - } - - private static boolean isDeltaTemporality(MetricData metricData) { - switch (metricData.getType()) { - case LONG_GAUGE: - case DOUBLE_GAUGE: - case SUMMARY: - return false; - case LONG_SUM: - return metricData.getLongSumData().getAggregationTemporality() - == AggregationTemporality.DELTA; - case DOUBLE_SUM: - return metricData.getDoubleSumData().getAggregationTemporality() - == AggregationTemporality.DELTA; - case HISTOGRAM: - return metricData.getHistogramData().getAggregationTemporality() - == AggregationTemporality.DELTA; - default: - } - throw new IllegalArgumentException("Can't happen"); - } - - private static void writeResource(Resource resource, Writer writer) throws IOException { - writer.write("# TYPE target info\n"); - writer.write("# HELP target Target metadata\n"); - writer.write("target_info{"); - writeAttributePairs(writer, /* initialComma= */ false, resource.getAttributes()); - writer.write("} 1\n"); - } - - private static void writeScopeInfo( - InstrumentationScopeInfo instrumentationScopeInfo, Writer writer) throws IOException { - writer.write("# TYPE otel_scope_info info\n"); - writer.write("# HELP otel_scope_info Scope metadata\n"); - writer.write("otel_scope_info{"); - writeScopeNameAndVersion(writer, instrumentationScopeInfo); - writeAttributePairs(writer, /* initialComma= */ true, instrumentationScopeInfo.getAttributes()); - writer.write("} 1\n"); - } - - private void writeHistogram( - Writer writer, - InstrumentationScopeInfo instrumentationScopeInfo, - String name, - HistogramPointData point) - throws IOException { - writePoint( - writer, - instrumentationScopeInfo, - name + "_count", - (double) point.getCount(), - point.getAttributes(), - point.getEpochNanos()); - writePoint( - writer, - instrumentationScopeInfo, - name + "_sum", - point.getSum(), - point.getAttributes(), - point.getEpochNanos()); - - long cumulativeCount = 0; - List counts = point.getCounts(); - for (int i = 0; i < counts.size(); i++) { - // This is the upper boundary (inclusive). I.e. all values should be < this value (LE - - // Less-then-or-Equal). 
- double boundary = getBucketUpperBound(point, i); - - cumulativeCount += counts.get(i); - writePoint( - writer, - instrumentationScopeInfo, - name + "_bucket", - (double) cumulativeCount, - point.getAttributes(), - point.getEpochNanos(), - "le", - boundary, - point.getExemplars(), - getBucketLowerBound(point, i), - boundary); - } - } - - /** - * Returns the lower bound of a bucket (all values would have been greater than). - * - * @param bucketIndex The bucket index, should match {@link HistogramPointData#getCounts()} index. - */ - static double getBucketLowerBound(HistogramPointData point, int bucketIndex) { - return bucketIndex > 0 ? point.getBoundaries().get(bucketIndex - 1) : Double.NEGATIVE_INFINITY; - } - - /** - * Returns the upper inclusive bound of a bucket (all values would have been less then or equal). - * - * @param bucketIndex The bucket index, should match {@link HistogramPointData#getCounts()} index. - */ - static double getBucketUpperBound(HistogramPointData point, int bucketIndex) { - List boundaries = point.getBoundaries(); - return (bucketIndex < boundaries.size()) - ? boundaries.get(bucketIndex) - : Double.POSITIVE_INFINITY; - } - - private void writeSummary( - Writer writer, - InstrumentationScopeInfo instrumentationScopeInfo, - String name, - SummaryPointData point) - throws IOException { - writePoint( - writer, - instrumentationScopeInfo, - name + "_count", - (double) point.getCount(), - point.getAttributes(), - point.getEpochNanos()); - writePoint( - writer, - instrumentationScopeInfo, - name + "_sum", - point.getSum(), - point.getAttributes(), - point.getEpochNanos()); - - List valueAtQuantiles = point.getValues(); - for (ValueAtQuantile valueAtQuantile : valueAtQuantiles) { - writePoint( - writer, - instrumentationScopeInfo, - name, - valueAtQuantile.getValue(), - point.getAttributes(), - point.getEpochNanos(), - "quantile", - valueAtQuantile.getQuantile(), - Collections.emptyList(), - 0, - 0); - } - } - - private void writePoint( - Writer writer, - InstrumentationScopeInfo instrumentationScopeInfo, - String name, - double value, - Attributes attributes, - long epochNanos) - throws IOException { - writer.write(name); - writeAttributes(writer, instrumentationScopeInfo, attributes); - writer.write(' '); - writeDouble(writer, value); - writer.write(' '); - writeTimestamp(writer, epochNanos); - writer.write('\n'); - } - - private void writePoint( - Writer writer, - InstrumentationScopeInfo instrumentationScopeInfo, - String name, - double value, - Attributes attributes, - long epochNanos, - String additionalAttrKey, - double additionalAttrValue, - Collection exemplars, - double minExemplar, - double maxExemplar) - throws IOException { - writer.write(name); - writeAttributes( - writer, instrumentationScopeInfo, attributes, additionalAttrKey, additionalAttrValue); - writer.write(' '); - writeDouble(writer, value); - writer.write(' '); - writeTimestamp(writer, epochNanos); - writeExemplar(writer, exemplars, minExemplar, maxExemplar); - writer.write('\n'); - } - - private static void writeAttributes( - Writer writer, InstrumentationScopeInfo instrumentationScopeInfo, Attributes attributes) - throws IOException { - writer.write('{'); - writeScopeNameAndVersion(writer, instrumentationScopeInfo); - if (!attributes.isEmpty()) { - writeAttributePairs(writer, /* initialComma= */ true, attributes); - } - writer.write('}'); - } - - private static void writeAttributes( - Writer writer, - InstrumentationScopeInfo instrumentationScopeInfo, - Attributes attributes, - String 
additionalAttrKey, - double additionalAttrValue) - throws IOException { - writer.write('{'); - writeScopeNameAndVersion(writer, instrumentationScopeInfo); - writer.write(','); - if (!attributes.isEmpty()) { - writeAttributePairs(writer, /* initialComma= */ false, attributes); - writer.write(','); - } - writer.write(additionalAttrKey); - writer.write("=\""); - writeDouble(writer, additionalAttrValue); - writer.write('"'); - writer.write('}'); - } - - private static void writeScopeNameAndVersion( - Writer writer, InstrumentationScopeInfo instrumentationScopeInfo) throws IOException { - writer.write("otel_scope_name=\""); - writer.write(instrumentationScopeInfo.getName()); - writer.write("\""); - if (instrumentationScopeInfo.getVersion() != null) { - writer.write(",otel_scope_version=\""); - writer.write(instrumentationScopeInfo.getVersion()); - writer.write("\""); - } - } - - private static void writeAttributePairs( - Writer writer, boolean initialComma, Attributes attributes) throws IOException { - try { - attributes.forEach( - new BiConsumer, Object>() { - private boolean prefixWithComma = initialComma; - - @Override - public void accept(AttributeKey key, Object value) { - try { - if (prefixWithComma) { - writer.write(','); - } else { - prefixWithComma = true; - } - writer.write(NameSanitizer.INSTANCE.apply(key.getKey())); - writer.write("=\""); - writeEscapedLabelValue(writer, value.toString()); - writer.write('"'); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - }); - } catch (UncheckedIOException e) { - throw e.getCause(); - } - } - - private static void writeDouble(Writer writer, double d) throws IOException { - if (d == Double.POSITIVE_INFINITY) { - writer.write("+Inf"); - } else if (d == Double.NEGATIVE_INFINITY) { - writer.write("-Inf"); - } else { - writer.write(Double.toString(d)); - } - } - - static void writeEscapedLabelValue(Writer writer, String s) throws IOException { - for (int i = 0; i < s.length(); i++) { - char c = s.charAt(i); - switch (c) { - case '\\': - writer.write("\\\\"); - break; - case '\"': - writer.write("\\\""); - break; - case '\n': - writer.write("\\n"); - break; - default: - writer.write(c); - } - } - } - - static class Prometheus004Serializer extends Serializer { - - Prometheus004Serializer(Predicate metricNameFilter) { - super(metricNameFilter); - } - - @Override - String contentType() { - return "text/plain; version=0.0.4; charset=utf-8"; - } - - @Override - String headerName(String name, MetricData rawMetric, PrometheusType type) { - return name; - } - - @Override - void writeHelp(Writer writer, String help) throws IOException { - for (int i = 0; i < help.length(); i++) { - char c = help.charAt(i); - switch (c) { - case '\\': - writer.write("\\\\"); - break; - case '\n': - writer.write("\\n"); - break; - default: - writer.write(c); - } - } - } - - @Override - void writeTimestamp(Writer writer, long timestampNanos) throws IOException { - writer.write(Long.toString(TimeUnit.NANOSECONDS.toMillis(timestampNanos))); - } - - @Override - void writeExemplar( - Writer writer, - Collection exemplars, - double minExemplar, - double maxExemplar) { - // Don't write exemplars - } - - @Override - void writeEof(Writer writer) { - // Don't write EOF - } - } - - static class OpenMetrics100Serializer extends Serializer { - - OpenMetrics100Serializer(Predicate metricNameFilter) { - super(metricNameFilter); - } - - @Override - String contentType() { - return "application/openmetrics-text; version=1.0.0; charset=utf-8"; - } - - @Override - String 
headerName(String name, MetricData rawMetric, PrometheusType type) { - // If the name didn't originally have a _total suffix, and we added it later, omit it from the - // header. - String sanitizedOriginalName = NameSanitizer.INSTANCE.apply(rawMetric.getName()); - if (!sanitizedOriginalName.endsWith("_total") && (type == PrometheusType.COUNTER)) { - return name.substring(0, name.length() - "_total".length()); - } - return name; - } - - @Override - void writeHelp(Writer writer, String description) throws IOException { - writeEscapedLabelValue(writer, description); - } - - @Override - void writeTimestamp(Writer writer, long timestampNanos) throws IOException { - long timestampMillis = TimeUnit.NANOSECONDS.toMillis(timestampNanos); - writer.write(Long.toString(timestampMillis / 1000)); - writer.write("."); - long millis = timestampMillis % 1000; - if (millis < 100) { - writer.write('0'); - } - if (millis < 10) { - writer.write('0'); - } - writer.write(Long.toString(millis)); - } - - @Override - void writeExemplar( - Writer writer, - Collection exemplars, - double minExemplar, - double maxExemplar) - throws IOException { - for (ExemplarData exemplar : exemplars) { - double value = getExemplarValue(exemplar); - if (value > minExemplar && value <= maxExemplar) { - writer.write(" # {"); - SpanContext spanContext = exemplar.getSpanContext(); - if (spanContext.isValid()) { - // NB: Output sorted to match prometheus client library even though it shouldn't matter. - // OTel generally outputs in trace_id span_id order though so we can consider breaking - // from reference implementation if it makes sense. - writer.write("span_id=\""); - writer.write(spanContext.getSpanId()); - writer.write("\",trace_id=\""); - writer.write(spanContext.getTraceId()); - writer.write('"'); - } - writer.write("} "); - writeDouble(writer, value); - writer.write(' '); - writeTimestamp(writer, exemplar.getEpochNanos()); - // Only write one exemplar. - return; - } - } - } - - @Override - void writeEof(Writer writer) throws IOException { - writer.write("# EOF\n"); - } - } - - static Collection getPoints(MetricData metricData) { - switch (metricData.getType()) { - case DOUBLE_GAUGE: - return metricData.getDoubleGaugeData().getPoints(); - case DOUBLE_SUM: - return metricData.getDoubleSumData().getPoints(); - case LONG_GAUGE: - return metricData.getLongGaugeData().getPoints(); - case LONG_SUM: - return metricData.getLongSumData().getPoints(); - case SUMMARY: - return metricData.getSummaryData().getPoints(); - case HISTOGRAM: - return metricData.getHistogramData().getPoints(); - case EXPONENTIAL_HISTOGRAM: - return metricData.getExponentialHistogramData().getPoints(); - } - return Collections.emptyList(); - } - - private static double getExemplarValue(ExemplarData exemplar) { - return exemplar instanceof DoubleExemplarData - ? 
((DoubleExemplarData) exemplar).getValue() - : (double) ((LongExemplarData) exemplar).getValue(); - } -} diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusComponentProvider.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusComponentProvider.java new file mode 100644 index 00000000000..04b8094608a --- /dev/null +++ b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusComponentProvider.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.prometheus.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.prometheus.PrometheusHttpServer; +import io.opentelemetry.exporter.prometheus.PrometheusHttpServerBuilder; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.metrics.export.MetricReader; + +/** + * Declarative configuration SPI implementation for {@link PrometheusHttpServer}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public class PrometheusComponentProvider implements ComponentProvider { + + @Override + public Class getType() { + return MetricReader.class; + } + + @Override + public String getName() { + return "prometheus"; + } + + @Override + public MetricReader create(DeclarativeConfigProperties config) { + PrometheusHttpServerBuilder prometheusBuilder = PrometheusHttpServer.builder(); + + Integer port = config.getInt("port"); + if (port != null) { + prometheusBuilder.setPort(port); + } + String host = config.getString("host"); + if (host != null) { + prometheusBuilder.setHost(host); + } + + return prometheusBuilder.build(); + } +} diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusCustomizerProvider.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusCustomizerProvider.java deleted file mode 100644 index f657e14eea7..00000000000 --- a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusCustomizerProvider.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.prometheus.internal; - -import io.opentelemetry.exporter.prometheus.PrometheusHttpServer; -import io.opentelemetry.exporter.prometheus.PrometheusHttpServerBuilder; -import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer; -import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; - -/** - * SPI implementation for {@link PrometheusHttpServer}. - * - *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. - */ -public class PrometheusCustomizerProvider implements AutoConfigurationCustomizerProvider { - - @Override - public void customize(AutoConfigurationCustomizer autoConfiguration) { - autoConfiguration.addMeterProviderCustomizer( - (builder, config) -> { - boolean prometheusEnabled = - config.getList("otel.metrics.exporter").contains("prometheus"); - if (prometheusEnabled) { - builder.registerMetricReader(configurePrometheusHttpServer(config)); - } - return builder; - }); - } - - // Visible for test - static PrometheusHttpServer configurePrometheusHttpServer(ConfigProperties config) { - PrometheusHttpServerBuilder prometheusBuilder = PrometheusHttpServer.builder(); - - Integer port = config.getInt("otel.exporter.prometheus.port"); - if (port != null) { - prometheusBuilder.setPort(port); - } - String host = config.getString("otel.exporter.prometheus.host"); - if (host != null) { - prometheusBuilder.setHost(host); - } - return prometheusBuilder.build(); - } -} diff --git a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusMetricReaderProvider.java b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusMetricReaderProvider.java index 062dd1e7ccd..4c60d3def19 100644 --- a/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusMetricReaderProvider.java +++ b/exporters/prometheus/src/main/java/io/opentelemetry/exporter/prometheus/internal/PrometheusMetricReaderProvider.java @@ -5,6 +5,7 @@ package io.opentelemetry.exporter.prometheus.internal; +import io.opentelemetry.exporter.internal.ExporterBuilderUtil; import io.opentelemetry.exporter.prometheus.PrometheusHttpServer; import io.opentelemetry.exporter.prometheus.PrometheusHttpServerBuilder; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; @@ -31,6 +32,17 @@ public MetricReader createMetricReader(ConfigProperties config) { if (host != null) { prometheusBuilder.setHost(host); } + + ExporterBuilderUtil.configureExporterMemoryMode(config, prometheusBuilder::setMemoryMode); + + String defaultHistogramAggregation = + config.getString( + "otel.java.experimental.exporter.prometheus.metrics.default.histogram.aggregation"); + if (defaultHistogramAggregation != null) { + ExporterBuilderUtil.configureHistogramDefaultAggregation( + defaultHistogramAggregation, prometheusBuilder::setDefaultAggregationSelector); + } + return prometheusBuilder.build(); } diff --git a/exporters/prometheus/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider b/exporters/prometheus/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider deleted file mode 100644 index 1522bd5c348..00000000000 --- a/exporters/prometheus/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider +++ /dev/null @@ -1 +0,0 @@ -io.opentelemetry.exporter.prometheus.internal.PrometheusCustomizerProvider \ No newline at end of file diff --git a/exporters/prometheus/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/exporters/prometheus/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider new file mode 100644 index 00000000000..f3c72966e4b --- /dev/null +++ 
b/exporters/prometheus/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider @@ -0,0 +1 @@ +io.opentelemetry.exporter.prometheus.internal.PrometheusComponentProvider diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/CollectorIntegrationTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/CollectorIntegrationTest.java index 07cee01a801..f58fdf722a2 100644 --- a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/CollectorIntegrationTest.java +++ b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/CollectorIntegrationTest.java @@ -56,6 +56,7 @@ * running in process, allowing assertions to be made against the data. */ @Testcontainers(disabledWithoutDocker = true) +@SuppressWarnings("NonFinalStaticField") class CollectorIntegrationTest { private static final String COLLECTOR_IMAGE = @@ -133,9 +134,16 @@ void endToEnd() { // Resource attributes derived from the prometheus scrape config stringKeyValue("service.name", "app"), stringKeyValue("service.instance.id", "host.testcontainers.internal:" + prometheusPort), + // net.host.name, net.host.port and http.scheme are superseded by server.address, + // server.port, and url.scheme respectively and will be removed by default in a future + // collector release + // https://github.com/open-telemetry/opentelemetry-collector-contrib/pull/32829 stringKeyValue("net.host.name", "host.testcontainers.internal"), stringKeyValue("net.host.port", String.valueOf(prometheusPort)), stringKeyValue("http.scheme", "http"), + stringKeyValue("server.address", "host.testcontainers.internal"), + stringKeyValue("server.port", String.valueOf(prometheusPort)), + stringKeyValue("url.scheme", "http"), // Resource attributes from the metric SDK resource translated to target_info stringKeyValue( "service_name", diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/NameSanitizerTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/NameSanitizerTest.java deleted file mode 100644 index 56eb36f085b..00000000000 --- a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/NameSanitizerTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.prometheus; - -import static org.assertj.core.api.Assertions.assertThat; - -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Function; -import java.util.stream.Stream; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -class NameSanitizerTest { - - @Test - void testSanitizerCaching() { - AtomicInteger count = new AtomicInteger(); - Function delegate = labelName -> labelName + count.incrementAndGet(); - NameSanitizer sanitizer = new NameSanitizer(delegate); - String labelName = "http.name"; - - assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1"); - assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1"); - assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1"); - assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1"); - assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1"); - assertThat(count).hasValue(1); - } - - @ParameterizedTest - 
@MethodSource("provideMetricNamesForTest") - void testSanitizerCleansing(String unsanitizedName, String sanitizedName) { - Assertions.assertEquals(sanitizedName, NameSanitizer.INSTANCE.apply(unsanitizedName)); - } - - private static Stream provideMetricNamesForTest() { - return Stream.of( - // valid name - already sanitized - Arguments.of( - "active_directory_ds_replication_network_io", - "active_directory_ds_replication_network_io"), - // consecutive underscores - Arguments.of("cpu_sp__d_hertz", "cpu_sp_d_hertz"), - // leading and trailing underscores - should be fine - Arguments.of("_cpu_speed_hertz_", "_cpu_speed_hertz_"), - // unsupported characters replaced - Arguments.of("metric_unit_$1000", "metric_unit_1000"), - // multiple unsupported characters - whitespace - Arguments.of("sample_me%%$$$_count_ !!@unit include", "sample_me_count_unit_include"), - // metric names cannot start with a number - Arguments.of("1_some_metric_name", "_some_metric_name"), - // metric names can have : - Arguments.of("sample_metric_name__:_per_meter", "sample_metric_name_:_per_meter"), - // Illegal characters - Arguments.of("cpu_sp$$d_hertz", "cpu_sp_d_hertz")); - } -} diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/Otel2PrometheusConverterTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/Otel2PrometheusConverterTest.java new file mode 100644 index 00000000000..5b8dd270548 --- /dev/null +++ b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/Otel2PrometheusConverterTest.java @@ -0,0 +1,481 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.prometheus; + +import static io.opentelemetry.api.common.AttributeKey.stringKey; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.MetricDataType; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData; +import io.opentelemetry.sdk.resources.Resource; +import io.prometheus.metrics.expositionformats.ExpositionFormats; +import io.prometheus.metrics.model.snapshots.MetricSnapshots; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; 
+import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Predicate; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nullable; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class Otel2PrometheusConverterTest { + + private static final Pattern PATTERN = + Pattern.compile( + "# HELP (?.*)\n# TYPE (?.*)\n(?.*)\\{otel_scope_name=\"scope\"}(.|\\n)*"); + + private final Otel2PrometheusConverter converter = + new Otel2PrometheusConverter(true, /* allowedResourceAttributesFilter= */ null); + + @ParameterizedTest + @MethodSource("metricMetadataArgs") + void metricMetadata( + MetricData metricData, String expectedType, String expectedHelp, String expectedMetricName) + throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + MetricSnapshots snapshots = converter.convert(Collections.singletonList(metricData)); + ExpositionFormats.init().getPrometheusTextFormatWriter().write(out, snapshots); + String expositionFormat = new String(out.toByteArray(), StandardCharsets.UTF_8); + + // Uncomment to debug exposition format output + // System.out.println(expositionFormat); + + Matcher matcher = PATTERN.matcher(expositionFormat); + assertThat(matcher.matches()).isTrue(); + assertThat(matcher.group("help")).isEqualTo(expectedHelp); + assertThat(matcher.group("type")).isEqualTo(expectedType); + // Note: Summaries and histograms produce output which matches METRIC_NAME_PATTERN multiple + // times. The pattern ends up matching against the first. + assertThat(matcher.group("metricName")).isEqualTo(expectedMetricName); + } + + @ParameterizedTest + @MethodSource("resourceAttributesAdditionArgs") + void resourceAttributesAddition( + MetricData metricData, + @Nullable Predicate allowedResourceAttributesFilter, + String metricName, + String expectedMetricLabels) + throws IOException { + + Otel2PrometheusConverter converter = + new Otel2PrometheusConverter(true, allowedResourceAttributesFilter); + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + MetricSnapshots snapshots = converter.convert(Collections.singletonList(metricData)); + ExpositionFormats.init().getPrometheusTextFormatWriter().write(out, snapshots); + String expositionFormat = new String(out.toByteArray(), StandardCharsets.UTF_8); + + // extract the only metric line + List metricLines = + Arrays.stream(expositionFormat.split("\n")) + .filter(line -> line.startsWith(metricName)) + .collect(Collectors.toList()); + assertThat(metricLines).hasSize(1); + String metricLine = metricLines.get(0); + + String metricLabels = + metricLine.substring(metricLine.indexOf("{") + 1, metricLine.indexOf("}")); + assertThat(metricLabels).isEqualTo(expectedMetricLabels); + } + + @Test + void prometheusNameCollisionTest_Issue6277() { + // NOTE: Metrics with the same resolved prometheus name should merge. However, + // Otel2PrometheusConverter is not responsible for merging individual series, so the merge will + // fail if the two different metrics contain overlapping series. Users should deal with this by + // adding a view that renames one of the two metrics such that the conflict does not occur. 
+ MetricData dotName = + createSampleMetricData( + "my.metric", + "units", + MetricDataType.LONG_SUM, + Attributes.builder().put("key", "a").build(), + Resource.create(Attributes.empty())); + MetricData underscoreName = + createSampleMetricData( + "my_metric", + "units", + MetricDataType.LONG_SUM, + Attributes.builder().put("key", "b").build(), + Resource.create(Attributes.empty())); + + List metricData = new ArrayList<>(); + metricData.add(dotName); + metricData.add(underscoreName); + + assertThatCode(() -> converter.convert(metricData)).doesNotThrowAnyException(); + } + + private static Stream resourceAttributesAdditionArgs() { + List arguments = new ArrayList<>(); + + for (MetricDataType metricDataType : MetricDataType.values()) { + // Check that resource attributes are added as labels, according to allowed pattern + arguments.add( + Arguments.of( + createSampleMetricData( + "my.metric", + "units", + metricDataType, + Attributes.of(stringKey("foo1"), "bar1", stringKey("foo2"), "bar2"), + Resource.create( + Attributes.of( + stringKey("host"), "localhost", stringKey("cluster"), "mycluster"))), + /* allowedResourceAttributesFilter= */ Predicates.startsWith("clu"), + metricDataType == MetricDataType.SUMMARY + || metricDataType == MetricDataType.HISTOGRAM + || metricDataType == MetricDataType.EXPONENTIAL_HISTOGRAM + ? "my_metric_units_count" + : "my_metric_units", + + // "cluster" attribute is added (due to reg expr specified) and only it + "cluster=\"mycluster\",foo1=\"bar1\",foo2=\"bar2\",otel_scope_name=\"scope\"")); + } + + // Resource attributes which also exists in the metric labels are not added twice + arguments.add( + Arguments.of( + createSampleMetricData( + "my.metric", + "units", + MetricDataType.LONG_SUM, + Attributes.of(stringKey("cluster"), "mycluster2", stringKey("foo2"), "bar2"), + Resource.create( + Attributes.of( + stringKey("host"), "localhost", stringKey("cluster"), "mycluster"))), + /* allowedResourceAttributesFilter= */ Predicates.startsWith("clu"), + "my_metric_units", + + // "cluster" attribute is present only once and the value is taken + // from the metric attributes and not the resource attributes + "cluster=\"mycluster2\",foo2=\"bar2\",otel_scope_name=\"scope\"")); + + // Empty attributes + arguments.add( + Arguments.of( + createSampleMetricData( + "my.metric", + "units", + MetricDataType.LONG_SUM, + Attributes.empty(), + Resource.create( + Attributes.of( + stringKey("host"), "localhost", stringKey("cluster"), "mycluster"))), + /* allowedResourceAttributesFilter= */ Predicates.startsWith("clu"), + "my_metric_units", + "cluster=\"mycluster\",otel_scope_name=\"scope\"")); + + return arguments.stream(); + } + + private static Stream metricMetadataArgs() { + return Stream.of( + // the unity unit "1" is translated to "ratio" + Arguments.of( + createSampleMetricData("sample", "1", MetricDataType.LONG_GAUGE), + "sample_ratio gauge", + "sample_ratio description", + "sample_ratio"), + // unit is appended to metric name + Arguments.of( + createSampleMetricData("sample", "unit", MetricDataType.LONG_GAUGE), + "sample_unit gauge", + "sample_unit description", + "sample_unit"), + // units in curly braces are dropped + Arguments.of( + createSampleMetricData("sample", "1{dropped}", MetricDataType.LONG_GAUGE), + "sample_ratio gauge", + "sample_ratio description", + "sample_ratio"), + // monotonic sums always include _total suffix + Arguments.of( + createSampleMetricData("sample", "unit", MetricDataType.LONG_SUM), + "sample_unit_total counter", + "sample_unit_total description", + 
"sample_unit_total"), + Arguments.of( + createSampleMetricData("sample", "1", MetricDataType.LONG_SUM), + "sample_ratio_total counter", + "sample_ratio_total description", + "sample_ratio_total"), + // units expressed as numbers other than 1 are retained + Arguments.of( + createSampleMetricData("sample", "2", MetricDataType.LONG_SUM), + "sample_2_total counter", + "sample_2_total description", + "sample_2_total"), + Arguments.of( + createSampleMetricData("metric_name", "2", MetricDataType.SUMMARY), + "metric_name_2 summary", + "metric_name_2 description", + "metric_name_2_count"), + // unsupported characters are translated to "_", repeated "_" are dropped + Arguments.of( + createSampleMetricData("s%%ple", "%/min", MetricDataType.SUMMARY), + "s_ple_percent_per_minute summary", + "s_ple_percent_per_minute description", + "s_ple_percent_per_minute_count"), + // metric unit is not appended if the name already contains the unit + Arguments.of( + createSampleMetricData("metric_name_total", "total", MetricDataType.LONG_SUM), + "metric_name_total counter", + "metric_name_total description", + "metric_name_total"), + // total suffix is stripped because total is a reserved suffixed for monotonic sums + Arguments.of( + createSampleMetricData("metric_name_total", "total", MetricDataType.SUMMARY), + "metric_name summary", + "metric_name description", + "metric_name_count"), + // if metric name ends with unit the unit is omitted + Arguments.of( + createSampleMetricData("metric_name_ratio", "1", MetricDataType.LONG_GAUGE), + "metric_name_ratio gauge", + "metric_name_ratio description", + "metric_name_ratio"), + Arguments.of( + createSampleMetricData("metric_name_ratio", "1", MetricDataType.SUMMARY), + "metric_name_ratio summary", + "metric_name_ratio description", + "metric_name_ratio_count"), + Arguments.of( + createSampleMetricData("metric_hertz", "hertz", MetricDataType.LONG_GAUGE), + "metric_hertz gauge", + "metric_hertz description", + "metric_hertz"), + Arguments.of( + createSampleMetricData("metric_hertz", "hertz", MetricDataType.LONG_SUM), + "metric_hertz_total counter", + "metric_hertz_total description", + "metric_hertz_total"), + // if metric name ends with unit the unit is omitted - order matters + Arguments.of( + createSampleMetricData("metric_total_hertz", "hertz_total", MetricDataType.LONG_SUM), + "metric_total_hertz_total counter", + "metric_total_hertz_total description", + "metric_total_hertz_total"), + // metric name cannot start with a number + Arguments.of( + createSampleMetricData("2_metric_name", "By", MetricDataType.SUMMARY), + "_metric_name_bytes summary", + "_metric_name_bytes description", + "_metric_name_bytes_count")); + } + + static MetricData createSampleMetricData( + String metricName, Resource resource, Attributes attributes) { + return createSampleMetricData( + metricName, "unit", MetricDataType.LONG_SUM, attributes, resource); + } + + static MetricData createSampleMetricData( + String metricName, String metricUnit, MetricDataType metricDataType) { + return createSampleMetricData(metricName, metricUnit, metricDataType, null, null); + } + + static MetricData createSampleMetricData( + String metricName, + String metricUnit, + MetricDataType metricDataType, + @Nullable Attributes attributes, + @Nullable Resource resource) { + Attributes attributesToUse = attributes == null ? Attributes.empty() : attributes; + Resource resourceToUse = resource == null ? 
Resource.getDefault() : resource; + + switch (metricDataType) { + case SUMMARY: + return ImmutableMetricData.createDoubleSummary( + resourceToUse, + InstrumentationScopeInfo.create("scope"), + metricName, + "description", + metricUnit, + ImmutableSummaryData.create( + Collections.singletonList( + ImmutableSummaryPointData.create( + 0, 1, attributesToUse, 1, 1, Collections.emptyList())))); + case LONG_SUM: + return ImmutableMetricData.createLongSum( + resourceToUse, + InstrumentationScopeInfo.create("scope"), + metricName, + "description", + metricUnit, + ImmutableSumData.create( + true, + AggregationTemporality.CUMULATIVE, + Collections.singletonList( + ImmutableLongPointData.create(0, 1, attributesToUse, 1L)))); + case DOUBLE_SUM: + return ImmutableMetricData.createDoubleSum( + resourceToUse, + InstrumentationScopeInfo.create("scope"), + metricName, + "description", + metricUnit, + ImmutableSumData.create( + true, + AggregationTemporality.CUMULATIVE, + Collections.singletonList( + ImmutableDoublePointData.create(0, 1, attributesToUse, 1.0)))); + case LONG_GAUGE: + return ImmutableMetricData.createLongGauge( + resourceToUse, + InstrumentationScopeInfo.create("scope"), + metricName, + "description", + metricUnit, + ImmutableGaugeData.create( + Collections.singletonList( + ImmutableLongPointData.create(0, 1, attributesToUse, 1L)))); + case DOUBLE_GAUGE: + return ImmutableMetricData.createDoubleGauge( + resourceToUse, + InstrumentationScopeInfo.create("scope"), + metricName, + "description", + metricUnit, + ImmutableGaugeData.create( + Collections.singletonList( + ImmutableDoublePointData.create(0, 1, attributesToUse, 1.0f)))); + case HISTOGRAM: + return ImmutableMetricData.createDoubleHistogram( + resourceToUse, + InstrumentationScopeInfo.create("scope"), + metricName, + "description", + metricUnit, + ImmutableHistogramData.create( + AggregationTemporality.CUMULATIVE, + Collections.singletonList( + ImmutableHistogramPointData.create( + 0, + 1, + attributesToUse, + 1, + false, + -1, + false, + -1, + Collections.singletonList(1.0), + Arrays.asList(0L, 1L))))); + case EXPONENTIAL_HISTOGRAM: + return ImmutableMetricData.createExponentialHistogram( + resourceToUse, + InstrumentationScopeInfo.create("scope"), + metricName, + "description", + metricUnit, + ImmutableExponentialHistogramData.create( + AggregationTemporality.CUMULATIVE, + Collections.singletonList( + ImmutableExponentialHistogramPointData.create( + 0, + 1, + 5, + false, + 1, + false, + 1, + ImmutableExponentialHistogramBuckets.create( + 2, 5, Arrays.asList(1L, 2L, 3L, 4L, 5L)), + ImmutableExponentialHistogramBuckets.create( + 2, 5, Arrays.asList(1L, 2L, 3L, 4L, 5L)), + 0, + 10, + attributesToUse, + Collections.emptyList())))); + } + + throw new IllegalArgumentException("Unsupported metric data type: " + metricDataType); + } + + @Test + void validateCacheIsBounded() { + AtomicInteger predicateCalledCount = new AtomicInteger(); + Predicate countPredicate = + s -> { + predicateCalledCount.addAndGet(1); + return true; + }; + + Otel2PrometheusConverter otel2PrometheusConverter = + new Otel2PrometheusConverter(true, /* allowedResourceAttributesFilter= */ countPredicate); + + // Create 20 different metric data objects with 2 different resource attributes; + Resource resource1 = Resource.builder().put("cluster", "cluster1").build(); + Resource resource2 = Resource.builder().put("cluster", "cluster2").build(); + + List metricDataList = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + Attributes attributes = Attributes.of(stringKey("foo" + 
i), "bar" + i); + metricDataList.add(createSampleMetricData("metric1", resource1, attributes)); + metricDataList.add(createSampleMetricData("metric2", resource2, attributes)); + } + + otel2PrometheusConverter.convert(metricDataList); + + // The predicate should be called only once for each resource attribute, and we have + // 2 unique resources, each with 1 attribute, so 2. + assertThat(predicateCalledCount.get()).isEqualTo(2); + + metricDataList.clear(); + + // Create 20 different metric data objects with 20 different resource attributes; + // This should cause the cache to be full, and then subsequently cleared + for (int i = 0; i < Otel2PrometheusConverter.MAX_CACHE_SIZE; i++) { + Attributes attributes = Attributes.of(stringKey("foo" + i), "bar" + i); + Resource resource = Resource.builder().put("cluster", "different-cluster" + i).build(); + metricDataList.add(createSampleMetricData("metric1", resource, attributes)); + metricDataList.add(createSampleMetricData("metric2", resource, attributes)); + } + otel2PrometheusConverter.convert(metricDataList); + + // Now lets put metrics with the same resource attributes as before + metricDataList.clear(); + predicateCalledCount.set(0); + for (int i = 0; i < 10; i++) { + Attributes attributes = Attributes.of(stringKey("foo" + i), "bar" + i); + metricDataList.add(createSampleMetricData("metric1", resource1, attributes)); + metricDataList.add(createSampleMetricData("metric2", resource2, attributes)); + } + otel2PrometheusConverter.convert(metricDataList); + + // If the cache was unbounded, the predicate should be 0, since it's all in the cache, + // but if the cache was cleared, it used the predicate for each resource, since it as if + // it never saw those resources before. + assertThat(predicateCalledCount.get()).isEqualTo(2); + } +} diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/Predicates.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/Predicates.java new file mode 100644 index 00000000000..8fdcc75c6c4 --- /dev/null +++ b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/Predicates.java @@ -0,0 +1,24 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.prometheus; + +import java.util.function.Predicate; + +public final class Predicates { + + static final Predicate ALLOW_ALL = attributeKey -> true; + + private Predicates() {} + + @SuppressWarnings("SameParameterValue") + static Predicate startsWith(String prefix) { + return attributeKey -> attributeKey.startsWith(prefix); + } + + public static Predicate is(String value) { + return attributeKey -> attributeKey.equals(value); + } +} diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServerTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServerTest.java index c6b5cd8d086..e78000f4aec 100644 --- a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServerTest.java +++ b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusHttpServerTest.java @@ -16,34 +16,51 @@ import com.linecorp.armeria.client.retry.RetryRule; import com.linecorp.armeria.client.retry.RetryingClient; import com.linecorp.armeria.common.AggregatedHttpResponse; +import com.linecorp.armeria.common.HttpData; import com.linecorp.armeria.common.HttpHeaderNames; import com.linecorp.armeria.common.HttpMethod; import 
com.linecorp.armeria.common.HttpStatus; import com.linecorp.armeria.common.RequestHeaders; import io.github.netmikey.logunit.api.LogCapturer; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.CollectionRegistration; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; import io.opentelemetry.sdk.resources.Resource; +import io.prometheus.metrics.exporter.httpserver.HTTPServer; +import io.prometheus.metrics.exporter.httpserver.MetricsHandler; +import io.prometheus.metrics.expositionformats.generated.com_google_protobuf_4_29_3.Metrics; +import io.prometheus.metrics.model.registry.PrometheusRegistry; +import io.prometheus.metrics.shaded.com_google_protobuf_4_29_3.TextFormat; import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.ServerSocket; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; import java.util.zip.GZIPInputStream; import org.assertj.core.api.InstanceOfAssertFactories; import org.junit.jupiter.api.AfterAll; @@ -51,24 +68,30 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; class PrometheusHttpServerTest { private static final AtomicReference> metricData = new AtomicReference<>(); - private static final MetricProducer metricProducer = metricData::get; + @SuppressWarnings("NonFinalStaticField") static PrometheusHttpServer prometheusServer; + + @SuppressWarnings("NonFinalStaticField") static WebClient client; @RegisterExtension - LogCapturer logs = LogCapturer.create().captureForType(PrometheusHttpServer.class); + LogCapturer logs = LogCapturer.create().captureForType(Otel2PrometheusConverter.class); @BeforeAll static void beforeAll() { // Register the SDK metric producer with the prometheus reader. 
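+ // (In a real application the reader would instead be attached to the SDK, e.g.
+ //   SdkMeterProvider.builder().registerMetricReader(prometheusServer).build();
+ // here the test registers a stub CollectionRegistration directly.)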
prometheusServer = PrometheusHttpServer.builder().setHost("localhost").setPort(0).build(); - prometheusServer.register(metricProducer); + prometheusServer.register( + new CollectionRegistration() { + @Override + public Collection collectAllMetrics() { + return metricData.get(); + } + }); client = WebClient.builder("http://localhost:" + prometheusServer.getAddress().getPort()) @@ -86,6 +109,7 @@ static void tearDown() { prometheusServer.shutdown(); } + @SuppressWarnings("DataFlowIssue") @Test void invalidConfig() { assertThatThrownBy(() -> PrometheusHttpServer.builder().setPort(-1)) @@ -97,43 +121,99 @@ void invalidConfig() { assertThatThrownBy(() -> PrometheusHttpServer.builder().setHost("")) .isInstanceOf(IllegalArgumentException.class) .hasMessage("host must not be empty"); + assertThatThrownBy(() -> PrometheusHttpServer.builder().setDefaultAggregationSelector(null)) + .isInstanceOf(NullPointerException.class) + .hasMessage("defaultAggregationSelector"); } - @ParameterizedTest - @ValueSource(strings = {"/metrics", "/"}) - void fetchPrometheus(String endpoint) { - AggregatedHttpResponse response = client.get(endpoint).aggregate().join(); + @Test + void fetchPrometheus() { + AggregatedHttpResponse response = client.get("/metrics").aggregate().join(); assertThat(response.status()).isEqualTo(HttpStatus.OK); assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE)) .isEqualTo("text/plain; version=0.0.4; charset=utf-8"); assertThat(response.contentUtf8()) .isEqualTo( - "# TYPE target info\n" - + "# HELP target Target metadata\n" - + "target_info{kr=\"vr\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"http\",otel_scope_version=\"version\"} 1\n" - + "# TYPE grpc_name_total counter\n" - + "# HELP grpc_name_total long_description\n" - + "grpc_name_total{otel_scope_name=\"grpc\",otel_scope_version=\"version\",kp=\"vp\"} 5.0 0\n" - + "# TYPE http_name_total counter\n" - + "# HELP http_name_total double_description\n" - + "http_name_total{otel_scope_name=\"http\",otel_scope_version=\"version\",kp=\"vp\"} 3.5 0\n"); + "# HELP grpc_name_unit_total long_description\n" + + "# TYPE grpc_name_unit_total counter\n" + + "grpc_name_unit_total{kp=\"vp\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n" + + "# HELP http_name_unit_total double_description\n" + + "# TYPE http_name_unit_total counter\n" + + "http_name_unit_total{kp=\"vp\",otel_scope_name=\"http\",otel_scope_version=\"version\"} 3.5\n" + + "# TYPE target_info gauge\n" + + "target_info{kr=\"vr\"} 1\n"); + } + + @Test + void fetch_ReusableMemoryMode() throws InterruptedException { + try (PrometheusHttpServer prometheusServer = + PrometheusHttpServer.builder() + .setHost("localhost") + .setPort(0) + .setMemoryMode(MemoryMode.REUSABLE_DATA) + .build()) { + AtomicBoolean collectInProgress = new AtomicBoolean(); + AtomicBoolean concurrentRead = new AtomicBoolean(); + prometheusServer.register( + new CollectionRegistration() { + @Override + public Collection collectAllMetrics() { + if (!collectInProgress.compareAndSet(false, true)) { + concurrentRead.set(true); + } + Collection response = metricData.get(); + try { + Thread.sleep(1); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + if (!collectInProgress.compareAndSet(true, false)) { + concurrentRead.set(true); + } + 
return response; + } + }); + + WebClient client = + WebClient.builder("http://localhost:" + prometheusServer.getAddress().getPort()) + .decorator(RetryingClient.newDecorator(RetryRule.failsafe())) + .build(); + + // Spin up 4 threads calling /metrics simultaneously. If concurrent read happens, + // collectAllMetrics will set concurrentRead to true and the test will fail. + List threads = new ArrayList<>(); + for (int i = 0; i < 4; i++) { + Thread thread = + new Thread( + () -> { + for (int j = 0; j < 10; j++) { + AggregatedHttpResponse response = client.get("/metrics").aggregate().join(); + assertThat(response.status()).isEqualTo(HttpStatus.OK); + } + }); + thread.setDaemon(true); + thread.start(); + threads.add(thread); + } + + // Wait for threads to complete + for (Thread thread : threads) { + thread.join(); + } + + // Confirm no concurrent reads took place + assertThat(concurrentRead.get()).isFalse(); + } } - @ParameterizedTest - @ValueSource(strings = {"/metrics", "/"}) - void fetchOpenMetrics(String endpoint) { + @Test + void fetchOpenMetrics() { AggregatedHttpResponse response = client .execute( RequestHeaders.of( HttpMethod.GET, - endpoint, + "/metrics", HttpHeaderNames.ACCEPT, "application/openmetrics-text")) .aggregate() @@ -143,44 +223,82 @@ void fetchOpenMetrics(String endpoint) { .isEqualTo("application/openmetrics-text; version=1.0.0; charset=utf-8"); assertThat(response.contentUtf8()) .isEqualTo( - "# TYPE target info\n" - + "# HELP target Target metadata\n" + "# TYPE grpc_name_unit counter\n" + + "# UNIT grpc_name_unit unit\n" + + "# HELP grpc_name_unit long_description\n" + + "grpc_name_unit_total{kp=\"vp\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n" + + "# TYPE http_name_unit counter\n" + + "# UNIT http_name_unit unit\n" + + "# HELP http_name_unit double_description\n" + + "http_name_unit_total{kp=\"vp\",otel_scope_name=\"http\",otel_scope_version=\"version\"} 3.5\n" + + "# TYPE target info\n" + "target_info{kr=\"vr\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"http\",otel_scope_version=\"version\"} 1\n" - + "# TYPE grpc_name counter\n" - + "# HELP grpc_name long_description\n" - + "grpc_name_total{otel_scope_name=\"grpc\",otel_scope_version=\"version\",kp=\"vp\"} 5.0 0.000\n" - + "# TYPE http_name counter\n" - + "# HELP http_name double_description\n" - + "http_name_total{otel_scope_name=\"http\",otel_scope_version=\"version\",kp=\"vp\"} 3.5 0.000\n" + "# EOF\n"); } + @SuppressWarnings("ConcatenationWithEmptyString") @Test void fetchFiltered() { AggregatedHttpResponse response = - client.get("/?name[]=grpc_name_total&name[]=bears_total").aggregate().join(); + client + .get("/metrics?name[]=grpc_name_unit_total&name[]=bears_total&name[]=target_info") + .aggregate() + .join(); assertThat(response.status()).isEqualTo(HttpStatus.OK); assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE)) .isEqualTo("text/plain; version=0.0.4; charset=utf-8"); assertThat(response.contentUtf8()) .isEqualTo( - "# TYPE target info\n" - + "# HELP target Target metadata\n" - + "target_info{kr=\"vr\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 1\n" - + "# TYPE grpc_name_total counter\n" - + "# HELP 
grpc_name_total long_description\n" - + "grpc_name_total{otel_scope_name=\"grpc\",otel_scope_version=\"version\",kp=\"vp\"} 5.0 0\n"); + "" + + "# HELP grpc_name_unit_total long_description\n" + + "# TYPE grpc_name_unit_total counter\n" + + "grpc_name_unit_total{kp=\"vp\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n" + + "# TYPE target_info gauge\n" + + "target_info{kr=\"vr\"} 1\n"); } + @Test + void fetchOverrideDefaultHandler() { + PrometheusRegistry registry = new PrometheusRegistry(); + try (PrometheusHttpServer prometheusServer = + PrometheusHttpServer.builder() + .setHost("localhost") + .setPort(0) + .setPrometheusRegistry(registry) + // Set the default handler to serve metrics on /** + .setDefaultHandler(new MetricsHandler(registry)) + .build()) { + prometheusServer.register( + new CollectionRegistration() { + @Override + public Collection collectAllMetrics() { + return metricData.get(); + } + }); + WebClient client = + WebClient.builder("http://localhost:" + prometheusServer.getAddress().getPort()) + .decorator(RetryingClient.newDecorator(RetryRule.failsafe())) + .build(); + + // Fetch metrics from / instead of /metrics + AggregatedHttpResponse response = client.get("/").aggregate().join(); + assertThat(response.status()).isEqualTo(HttpStatus.OK); + assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE)) + .isEqualTo("text/plain; version=0.0.4; charset=utf-8"); + assertThat(response.contentUtf8()) + .isEqualTo( + "# HELP grpc_name_unit_total long_description\n" + + "# TYPE grpc_name_unit_total counter\n" + + "grpc_name_unit_total{kp=\"vp\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n" + + "# HELP http_name_unit_total double_description\n" + + "# TYPE http_name_unit_total counter\n" + + "http_name_unit_total{kp=\"vp\",otel_scope_name=\"http\",otel_scope_version=\"version\"} 3.5\n" + + "# TYPE target_info gauge\n" + + "target_info{kr=\"vr\"} 1\n"); + } + } + + @SuppressWarnings("resource") @Test void fetchPrometheusCompressed() throws IOException { WebClient client = @@ -188,7 +306,7 @@ void fetchPrometheusCompressed() throws IOException { .decorator(RetryingClient.newDecorator(RetryRule.failsafe())) .addHeader(HttpHeaderNames.ACCEPT_ENCODING, "gzip") .build(); - AggregatedHttpResponse response = client.get("/").aggregate().join(); + AggregatedHttpResponse response = client.get("/metrics").aggregate().join(); assertThat(response.status()).isEqualTo(HttpStatus.OK); assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE)) .isEqualTo("text/plain; version=0.0.4; charset=utf-8"); @@ -197,26 +315,20 @@ void fetchPrometheusCompressed() throws IOException { String content = new String(ByteStreams.toByteArray(gis), StandardCharsets.UTF_8); assertThat(content) .isEqualTo( - "# TYPE target info\n" - + "# HELP target Target metadata\n" - + "target_info{kr=\"vr\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"http\",otel_scope_version=\"version\"} 1\n" - + "# TYPE grpc_name_total counter\n" - + "# HELP grpc_name_total long_description\n" - + "grpc_name_total{otel_scope_name=\"grpc\",otel_scope_version=\"version\",kp=\"vp\"} 5.0 0\n" - + "# TYPE http_name_total counter\n" - + "# HELP http_name_total double_description\n" - + 
"http_name_total{otel_scope_name=\"http\",otel_scope_version=\"version\",kp=\"vp\"} 3.5 0\n"); + "# HELP grpc_name_unit_total long_description\n" + + "# TYPE grpc_name_unit_total counter\n" + + "grpc_name_unit_total{kp=\"vp\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n" + + "# HELP http_name_unit_total double_description\n" + + "# TYPE http_name_unit_total counter\n" + + "http_name_unit_total{kp=\"vp\",otel_scope_name=\"http\",otel_scope_version=\"version\"} 3.5\n" + + "# TYPE target_info gauge\n" + + "target_info{kr=\"vr\"} 1\n"); } + @SuppressWarnings("resource") @Test void fetchHead() { - AggregatedHttpResponse response = client.head("/").aggregate().join(); + AggregatedHttpResponse response = client.head("/metrics").aggregate().join(); assertThat(response.status()).isEqualTo(HttpStatus.OK); assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE)) .isEqualTo("text/plain; version=0.0.4; charset=utf-8"); @@ -228,11 +340,12 @@ void fetchHealth() { AggregatedHttpResponse response = client.get("/-/healthy").aggregate().join(); assertThat(response.status()).isEqualTo(HttpStatus.OK); - assertThat(response.contentUtf8()).isEqualTo("Exporter is Healthy."); + assertThat(response.contentUtf8()).isEqualTo("Exporter is healthy.\n"); } @Test @SuppressLogger(PrometheusHttpServer.class) + @SuppressLogger(Otel2PrometheusConverter.class) void fetch_DuplicateMetrics() { Resource resource = Resource.create(Attributes.of(stringKey("kr"), "vr")); metricData.set( @@ -267,39 +380,27 @@ void fetch_DuplicateMetrics() { InstrumentationScopeInfo.create("scope3"), "foo_unit_total", "unused", - "unit", + "", ImmutableGaugeData.create( Collections.singletonList( ImmutableLongPointData.create(123, 456, Attributes.empty(), 3)))))); - AggregatedHttpResponse response = client.get("/").aggregate().join(); + AggregatedHttpResponse response = client.get("/metrics").aggregate().join(); assertThat(response.status()).isEqualTo(HttpStatus.OK); assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE)) .isEqualTo("text/plain; version=0.0.4; charset=utf-8"); assertThat(response.contentUtf8()) .isEqualTo( - "# TYPE target info\n" - + "# HELP target Target metadata\n" - + "target_info{kr=\"vr\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"scope1\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"scope2\"} 1\n" - + "# TYPE foo_unit_total counter\n" - + "# HELP foo_unit_total description1\n" - + "foo_unit_total{otel_scope_name=\"scope1\"} 1.0 0\n" - + "foo_unit_total{otel_scope_name=\"scope2\"} 2.0 0\n"); + "# TYPE foo_unit_total counter\n" + + "foo_unit_total{otel_scope_name=\"scope1\"} 1.0\n" + + "foo_unit_total{otel_scope_name=\"scope2\"} 2.0\n" + + "# TYPE target_info gauge\n" + + "target_info{kr=\"vr\"} 1\n"); // Validate conflict warning message assertThat(logs.getEvents()).hasSize(1); logs.assertContains( - "Metric conflict(s) detected. Multiple metrics with same name but different type: [foo_unit_total]"); - - // Make another request and confirm warning is only logged once - client.get("/").aggregate().join(); - assertThat(logs.getEvents()).hasSize(1); + "Conflicting metrics: Multiple metrics with name foo_unit but different units found. 
Dropping the one with unit null."); } @Test @@ -311,8 +412,9 @@ void stringRepresentation() { @Test void defaultExecutor() { assertThat(prometheusServer) - .extracting("executor", as(InstanceOfAssertFactories.type(ThreadPoolExecutor.class))) - .satisfies(executor -> assertThat(executor.getCorePoolSize()).isEqualTo(5)); + .extracting("httpServer", as(InstanceOfAssertFactories.type(HTTPServer.class))) + .extracting("executorService", as(InstanceOfAssertFactories.type(ThreadPoolExecutor.class))) + .satisfies(executor -> assertThat(executor.getCorePoolSize()).isEqualTo(1)); } @Test @@ -326,15 +428,62 @@ void customExecutor() throws IOException { PrometheusHttpServer.builder() .setHost("localhost") .setPort(port) + // Memory mode must be IMMUTABLE_DATA to set custom executor + .setMemoryMode(MemoryMode.IMMUTABLE_DATA) .setExecutor(scheduledExecutor) .build()) { assertThat(server) + .extracting("httpServer", as(InstanceOfAssertFactories.type(HTTPServer.class))) .extracting( - "executor", as(InstanceOfAssertFactories.type(ScheduledThreadPoolExecutor.class))) + "executorService", + as(InstanceOfAssertFactories.type(ScheduledThreadPoolExecutor.class))) .satisfies(executor -> assertThat(executor).isSameAs(scheduledExecutor)); } } + @Test + void addResourceAttributesWorks() { + WebClient testClient; + try (PrometheusHttpServer testPrometheusServer = + PrometheusHttpServer.builder() + .setHost("localhost") + .setPort(0) + .setAllowedResourceAttributesFilter(Predicates.ALLOW_ALL) + .build()) { + testPrometheusServer.register( + new CollectionRegistration() { + @Override + public Collection collectAllMetrics() { + return metricData.get(); + } + }); + + testClient = + WebClient.builder("http://localhost:" + testPrometheusServer.getAddress().getPort()) + .decorator(RetryingClient.newDecorator(RetryRule.failsafe())) + .build(); + + AggregatedHttpResponse response = testClient.get("/metrics").aggregate().join(); + assertThat(response.status()).isEqualTo(HttpStatus.OK); + assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE)) + .isEqualTo("text/plain; version=0.0.4; charset=utf-8"); + assertThat(response.contentUtf8()) + .isEqualTo( + "# HELP grpc_name_unit_total long_description\n" + + "# TYPE grpc_name_unit_total counter\n" + + // Note the added resource attributes as labels + + "grpc_name_unit_total{kp=\"vp\",kr=\"vr\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n" + + "# HELP http_name_unit_total double_description\n" + + "# TYPE http_name_unit_total counter\n" + + // Note the added resource attributes as labels + + "http_name_unit_total{kp=\"vp\",kr=\"vr\",otel_scope_name=\"http\",otel_scope_version=\"version\"} 3.5\n" + + "# TYPE target_info gauge\n" + + "target_info{kr=\"vr\"} 1\n"); + } + } + private static List generateTestData() { return ImmutableList.of( ImmutableMetricData.createLongSum( @@ -342,7 +491,7 @@ private static List generateTestData() { InstrumentationScopeInfo.builder("grpc").setVersion("version").build(), "grpc.name", "long_description", - "1", + "unit", ImmutableSumData.create( /* isMonotonic= */ true, AggregationTemporality.CUMULATIVE, @@ -354,7 +503,7 @@ private static List generateTestData() { InstrumentationScopeInfo.builder("http").setVersion("version").build(), "http.name", "double_description", - "1", + "unit", ImmutableSumData.create( /* isMonotonic= */ true, AggregationTemporality.CUMULATIVE, @@ -362,4 +511,104 @@ private static List generateTestData() { ImmutableDoublePointData.create( 123, 456, Attributes.of(stringKey("kp"), "vp"), 3.5))))); } + 
+ @Test + void toBuilder() { + PrometheusHttpServerBuilder builder = PrometheusHttpServer.builder(); + builder.setHost("localhost"); + builder.setPort(1234); + builder.setOtelScopeEnabled(false); + + Predicate resourceAttributesFilter = s -> false; + builder.setAllowedResourceAttributesFilter(resourceAttributesFilter); + + ExecutorService executor = Executors.newSingleThreadExecutor(); + builder.setExecutor(executor).setMemoryMode(MemoryMode.IMMUTABLE_DATA); + + PrometheusRegistry prometheusRegistry = new PrometheusRegistry(); + builder.setPrometheusRegistry(prometheusRegistry); + + PrometheusHttpServer httpServer = builder.build(); + PrometheusHttpServerBuilder fromOriginalBuilder = httpServer.toBuilder(); + httpServer.close(); + assertThat(fromOriginalBuilder) + .isInstanceOf(PrometheusHttpServerBuilder.class) + .hasFieldOrPropertyWithValue("host", "localhost") + .hasFieldOrPropertyWithValue("port", 1234) + .hasFieldOrPropertyWithValue("otelScopeEnabled", false) + .hasFieldOrPropertyWithValue("allowedResourceAttributesFilter", resourceAttributesFilter) + .hasFieldOrPropertyWithValue("executor", executor) + .hasFieldOrPropertyWithValue("prometheusRegistry", prometheusRegistry); + } + + /** + * Set the default histogram aggregation to be {@link + * Aggregation#base2ExponentialBucketHistogram()}. In order to validate that exponential + * histograms are produced, we request protobuf encoded metrics when scraping since the prometheus + * text format does not support native histograms. We parse the binary content protobuf payload to + * the protobuf java bindings, and assert against the string representation. + */ + @Test + void histogramDefaultBase2ExponentialHistogram() throws IOException { + PrometheusHttpServer prometheusServer = + PrometheusHttpServer.builder() + .setHost("localhost") + .setPort(0) + .setDefaultAggregationSelector( + DefaultAggregationSelector.getDefault() + .with(InstrumentType.HISTOGRAM, Aggregation.base2ExponentialBucketHistogram())) + .build(); + try (SdkMeterProvider meterProvider = + SdkMeterProvider.builder().registerMetricReader(prometheusServer).build()) { + DoubleHistogram histogram = meterProvider.get("meter").histogramBuilder("histogram").build(); + histogram.record(1.0); + + WebClient client = + WebClient.builder("http://localhost:" + prometheusServer.getAddress().getPort()) + .decorator(RetryingClient.newDecorator(RetryRule.failsafe())) + // Request protobuf binary encoding, which is required for the prometheus native + // histogram format + .addHeader( + "Accept", + "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily") + .build(); + AggregatedHttpResponse response = client.get("/metrics").aggregate().join(); + assertThat(response.status()).isEqualTo(HttpStatus.OK); + assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE)) + .isEqualTo( + "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited"); + // Parse the data to Metrics.MetricFamily protobuf java binding and assert against the string + // representation + try (HttpData data = response.content()) { + Metrics.MetricFamily metricFamily = + Metrics.MetricFamily.parseDelimitedFrom(data.toInputStream()); + String s = TextFormat.printer().printToString(metricFamily); + assertThat(s) + .isEqualTo( + "name: \"histogram\"\n" + + "help: \"\"\n" + + "type: HISTOGRAM\n" + + "metric {\n" + + " label {\n" + + " name: \"otel_scope_name\"\n" + + " value: \"meter\"\n" + + " }\n" + + " histogram {\n" + + " sample_count: 1\n" + + " sample_sum: 1.0\n" + 
+ " schema: 8\n" + + " zero_threshold: 0.0\n" + + " zero_count: 0\n" + + " positive_span {\n" + + " offset: 0\n" + + " length: 1\n" + + " }\n" + + " positive_delta: 1\n" + + " }\n" + + "}\n"); + } + } finally { + prometheusServer.shutdown(); + } + } } diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusIntegrationTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusIntegrationTest.java index f98b06c7da6..ce178566030 100644 --- a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusIntegrationTest.java +++ b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusIntegrationTest.java @@ -11,7 +11,7 @@ import com.fasterxml.jackson.core.TreeNode; import com.fasterxml.jackson.jr.ob.JSON; -import com.fasterxml.jackson.jr.stree.JacksonJrsTreeCodec; +import com.fasterxml.jackson.jr.stree.JrSimpleTreeExtension; import com.fasterxml.jackson.jr.stree.JrsString; import com.google.common.io.Resources; import com.linecorp.armeria.client.WebClient; @@ -35,7 +35,10 @@ @Testcontainers(disabledWithoutDocker = true) class PrometheusIntegrationTest { + @SuppressWarnings("NonFinalStaticField") private static SdkMeterProvider meterProvider; + + @SuppressWarnings("NonFinalStaticField") private static GenericContainer prometheus; @BeforeAll @@ -85,7 +88,7 @@ void endToEnd() { result -> result.record(9, Attributes.builder().put("animal", "cat").build())); WebClient promClient = WebClient.of("http://localhost:" + prometheus.getMappedPort(9090)); - JSON json = JSON.builder().treeCodec(new JacksonJrsTreeCodec()).build(); + JSON json = JSON.builder().register(new JrSimpleTreeExtension()).build(); await() .untilAsserted( () -> { diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusMetricNameMapperTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusMetricNameMapperTest.java deleted file mode 100644 index 6a677c361ef..00000000000 --- a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusMetricNameMapperTest.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.prometheus; - -import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_HISTOGRAM; -import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE; -import static io.opentelemetry.exporter.prometheus.TestConstants.MONOTONIC_CUMULATIVE_LONG_SUM; -import static io.opentelemetry.exporter.prometheus.TestConstants.SUMMARY; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.BiFunction; -import java.util.stream.Stream; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -class PrometheusMetricNameMapperTest { - - @Test - void prometheusMetricNameMapperCaching() { - AtomicInteger count = new AtomicInteger(); - BiFunction delegate = - (metricData, prometheusType) -> - String.join( - "_", - metricData.getName(), - prometheusType.name(), - 
Integer.toString(count.incrementAndGet())); - PrometheusMetricNameMapper mapper = new PrometheusMetricNameMapper(delegate); - - assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE)) - .isEqualTo("monotonic.cumulative.long.sum_GAUGE_1"); - assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE)) - .isEqualTo("monotonic.cumulative.long.sum_GAUGE_1"); - assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE)) - .isEqualTo("monotonic.cumulative.long.sum_GAUGE_1"); - assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE)) - .isEqualTo("monotonic.cumulative.long.sum_GAUGE_1"); - assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE)) - .isEqualTo("monotonic.cumulative.long.sum_GAUGE_1"); - assertThat(count).hasValue(1); - } - - @ParameterizedTest - @MethodSource("provideRawMetricDataForTest") - void metricNameSerializationTest(MetricData metricData, String expectedSerializedName) { - assertEquals( - expectedSerializedName, - PrometheusMetricNameMapper.INSTANCE.apply( - metricData, PrometheusType.forMetric(metricData))); - } - - private static Stream provideRawMetricDataForTest() { - return Stream.of( - // special case for gauge - Arguments.of(createSampleMetricData("sample", "1", PrometheusType.GAUGE), "sample_ratio"), - // special case for gauge with drop - metric unit should match "1" to be converted to - // "ratio" - Arguments.of( - createSampleMetricData("sample", "1{dropped}", PrometheusType.GAUGE), "sample"), - // Gauge without "1" as unit - Arguments.of(createSampleMetricData("sample", "unit", PrometheusType.GAUGE), "sample_unit"), - // special case with counter - Arguments.of( - createSampleMetricData("sample", "unit", PrometheusType.COUNTER), "sample_unit_total"), - // special case unit "1", but no gauge - "1" is dropped - Arguments.of(createSampleMetricData("sample", "1", PrometheusType.COUNTER), "sample_total"), - // units expressed as numbers other than 1 are retained - Arguments.of( - createSampleMetricData("sample", "2", PrometheusType.COUNTER), "sample_2_total"), - // metric name with unsupported characters - Arguments.of( - createSampleMetricData("s%%ple", "%/m", PrometheusType.SUMMARY), - "s_ple_percent_per_minute"), - // metric name with dropped portions - Arguments.of( - createSampleMetricData("s%%ple", "%/m", PrometheusType.SUMMARY), - "s_ple_percent_per_minute"), - // metric unit as a number other than 1 is not treated specially - Arguments.of( - createSampleMetricData("metric_name", "2", PrometheusType.SUMMARY), "metric_name_2"), - // metric unit is not appended if the name already contains the unit - Arguments.of( - createSampleMetricData("metric_name_total", "total", PrometheusType.COUNTER), - "metric_name_total"), - // metric unit is not appended if the name already contains the unit - special case for - // total with non-counter type - Arguments.of( - createSampleMetricData("metric_name_total", "total", PrometheusType.SUMMARY), - "metric_name_total"), - // metric unit not appended if present in metric name - special case for ratio - Arguments.of( - createSampleMetricData("metric_name_ratio", "1", PrometheusType.GAUGE), - "metric_name_ratio"), - // metric unit not appended if present in metric name - special case for ratio - unit not - // gauge - Arguments.of( - createSampleMetricData("metric_name_ratio", "1", PrometheusType.SUMMARY), - "metric_name_ratio"), - // metric unit is not appended if the name already contains the unit - unit can be anywhere - 
Arguments.of( - createSampleMetricData("metric_hertz", "hertz", PrometheusType.GAUGE), "metric_hertz"), - // metric unit is not appended if the name already contains the unit - applies to every unit - Arguments.of( - createSampleMetricData("metric_hertz_total", "hertz_total", PrometheusType.COUNTER), - "metric_hertz_total"), - // metric unit is not appended if the name already contains the unit - order matters - Arguments.of( - createSampleMetricData("metric_total_hertz", "hertz_total", PrometheusType.COUNTER), - "metric_total_hertz_hertz_total_total"), - // metric name cannot start with a number - Arguments.of( - createSampleMetricData("2_metric_name", "By", PrometheusType.SUMMARY), - "_metric_name_bytes")); - } - - static MetricData createSampleMetricData( - String metricName, String metricUnit, PrometheusType prometheusType) { - switch (prometheusType) { - case SUMMARY: - return ImmutableMetricData.createDoubleSummary( - SUMMARY.getResource(), - SUMMARY.getInstrumentationScopeInfo(), - metricName, - SUMMARY.getDescription(), - metricUnit, - SUMMARY.getSummaryData()); - case COUNTER: - return ImmutableMetricData.createLongSum( - MONOTONIC_CUMULATIVE_LONG_SUM.getResource(), - MONOTONIC_CUMULATIVE_LONG_SUM.getInstrumentationScopeInfo(), - metricName, - MONOTONIC_CUMULATIVE_LONG_SUM.getDescription(), - metricUnit, - MONOTONIC_CUMULATIVE_LONG_SUM.getLongSumData()); - case GAUGE: - return ImmutableMetricData.createDoubleGauge( - DOUBLE_GAUGE.getResource(), - DOUBLE_GAUGE.getInstrumentationScopeInfo(), - metricName, - DOUBLE_GAUGE.getDescription(), - metricUnit, - DOUBLE_GAUGE.getDoubleGaugeData()); - case HISTOGRAM: - return ImmutableMetricData.createDoubleHistogram( - DELTA_HISTOGRAM.getResource(), - DELTA_HISTOGRAM.getInstrumentationScopeInfo(), - metricName, - DELTA_HISTOGRAM.getDescription(), - metricUnit, - DELTA_HISTOGRAM.getHistogramData()); - } - throw new IllegalArgumentException(); - } -} diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusMetricReaderTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusMetricReaderTest.java new file mode 100644 index 00000000000..20076cc9c9d --- /dev/null +++ b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusMetricReaderTest.java @@ -0,0 +1,1162 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.prometheus; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleCounter; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.DoubleUpDownCounter; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentSelector; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.View; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.time.TestClock; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.prometheus.metrics.expositionformats.OpenMetricsTextFormatWriter; +import 
io.prometheus.metrics.expositionformats.PrometheusProtobufWriter; +import io.prometheus.metrics.model.snapshots.HistogramSnapshot; +import io.prometheus.metrics.model.snapshots.HistogramSnapshot.HistogramDataPointSnapshot; +import io.prometheus.metrics.model.snapshots.MetricSnapshots; +import io.prometheus.metrics.model.snapshots.NativeHistogramBuckets; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.time.Instant; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import java.util.regex.MatchResult; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +@SuppressWarnings({"resource", "ConcatenationWithEmptyString"}) +class PrometheusMetricReaderTest { + + private final TestClock testClock = TestClock.create(); + private String createdTimestamp; + private PrometheusMetricReader reader; + private Meter meter; + private Tracer tracer; + + @SuppressWarnings("resource") + @BeforeEach + void setUp() { + this.testClock.setTime(Instant.ofEpochMilli((System.currentTimeMillis() / 100) * 100)); + this.createdTimestamp = convertTimestamp(testClock.now()); + this.reader = new PrometheusMetricReader(true, /* allowedResourceAttributesFilter= */ null); + this.meter = + SdkMeterProvider.builder() + .setClock(testClock) + .registerMetricReader(this.reader) + .setResource( + Resource.getDefault().toBuilder().put("telemetry.sdk.version", "1.x.x").build()) + .registerView( + InstrumentSelector.builder().setName("my.exponential.histogram").build(), + View.builder() + .setAggregation(Aggregation.base2ExponentialBucketHistogram()) + .build()) + .build() + .meterBuilder("test") + .build(); + this.tracer = + SdkTracerProvider.builder().setClock(testClock).build().tracerBuilder("test").build(); + } + + @Test + void longCounterComplete() throws IOException { + LongCounter counter = + meter + .counterBuilder("requests.size") + .setDescription("some help text") + .setUnit("By") + .build(); + Span span1 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span1.makeCurrent()) { + counter.add(3, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + Span span2 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span2.makeCurrent()) { + counter.add(2, Attributes.builder().put("animal", "mouse").build()); + } finally { + span2.end(); + } + assertCounterComplete(reader.collect(), span1, span2); + } + + @Test + void doubleCounterComplete() throws IOException { + DoubleCounter counter = + meter + .counterBuilder("requests.size") + .setDescription("some help text") + .setUnit("By") + .ofDoubles() + .build(); + Span span1 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span1.makeCurrent()) { + counter.add(3.0, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + Span span2 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span2.makeCurrent()) { + counter.add(2.0, Attributes.builder().put("animal", "mouse").build()); + } finally { + span2.end(); + } + assertCounterComplete(reader.collect(), span1, span2); + } + + private void assertCounterComplete(MetricSnapshots snapshots, Span span1, Span span2) + throws IOException { + String expected = + "" + + "# TYPE requests_size_bytes counter\n" + + "# UNIT 
requests_size_bytes bytes\n" + + "# HELP requests_size_bytes some help text\n" + + "requests_size_bytes_total{animal=\"bear\",otel_scope_name=\"test\"} 3.0 # {span_id=\"" + + span1.getSpanContext().getSpanId() + + "\",trace_id=\"" + + span1.getSpanContext().getTraceId() + + "\"} 3.0 \n" + + "requests_size_bytes_created{animal=\"bear\",otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "requests_size_bytes_total{animal=\"mouse\",otel_scope_name=\"test\"} 2.0 # {span_id=\"" + + span2.getSpanContext().getSpanId() + + "\",trace_id=\"" + + span2.getSpanContext().getTraceId() + + "\"} 2.0 \n" + + "requests_size_bytes_created{animal=\"mouse\",otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertMatches(expected, toOpenMetrics(snapshots)); + } + + @Test + void longCounterMinimal() throws IOException { + LongCounter counter = meter.counterBuilder("requests").build(); + counter.add(2); + assertCounterMinimal(reader.collect()); + } + + @Test + void doubleCounterMinimal() throws IOException { + DoubleCounter counter = meter.counterBuilder("requests").ofDoubles().build(); + counter.add(2.0); + assertCounterMinimal(reader.collect()); + } + + private void assertCounterMinimal(MetricSnapshots snapshots) throws IOException { + String expected = + "" + + "# TYPE requests counter\n" + + "requests_total{otel_scope_name=\"test\"} 2.0\n" + + "requests_created{otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertThat(toOpenMetrics(snapshots)).isEqualTo(expected); + } + + @Test + void longUpDownCounterComplete() throws IOException { + LongUpDownCounter counter = + meter + .upDownCounterBuilder("queue.size") + .setDescription("some help text") + .setUnit("By") + .build(); + Span span1 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span1.makeCurrent()) { + counter.add(3, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + Span span2 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span2.makeCurrent()) { + counter.add(2, Attributes.builder().put("animal", "mouse").build()); + } finally { + span2.end(); + } + assertUpDownCounterComplete(reader.collect(), span1, span2); + } + + @Test + void doubleUpDownCounterComplete() throws IOException { + DoubleUpDownCounter counter = + meter + .upDownCounterBuilder("queue.size") + .setDescription("some help text") + .setUnit("By") + .ofDoubles() + .build(); + Span span1 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span1.makeCurrent()) { + counter.add(3.0, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + Span span2 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span2.makeCurrent()) { + counter.add(2.0, Attributes.builder().put("animal", "mouse").build()); + } finally { + span2.end(); + } + assertUpDownCounterComplete(reader.collect(), span1, span2); + } + + private static void assertUpDownCounterComplete(MetricSnapshots snapshots, Span span1, Span span2) + throws IOException { + String expected = + "" + + "# TYPE queue_size_bytes gauge\n" + + "# UNIT queue_size_bytes bytes\n" + + "# HELP 
queue_size_bytes some help text\n" + + "queue_size_bytes{animal=\"bear\",otel_scope_name=\"test\"} 3.0 # {span_id=\"" + + span1.getSpanContext().getSpanId() + + "\",trace_id=\"" + + span1.getSpanContext().getTraceId() + + "\"} 3.0 \n" + + "queue_size_bytes{animal=\"mouse\",otel_scope_name=\"test\"} 2.0 # {span_id=\"" + + span2.getSpanContext().getSpanId() + + "\",trace_id=\"" + + span2.getSpanContext().getTraceId() + + "\"} 2.0 \n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertMatches(expected, toOpenMetrics(snapshots)); + } + + @Test + void longUpDownCounterMinimal() throws IOException { + LongUpDownCounter counter = meter.upDownCounterBuilder("users.active").build(); + counter.add(27); + assertUpDownCounterMinimal(reader.collect()); + } + + @Test + void doubleUpDownCounterMinimal() throws IOException { + DoubleUpDownCounter counter = meter.upDownCounterBuilder("users.active").ofDoubles().build(); + counter.add(27.0); + assertUpDownCounterMinimal(reader.collect()); + } + + private static void assertUpDownCounterMinimal(MetricSnapshots snapshots) throws IOException { + String expected = + "" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# TYPE users_active gauge\n" + + "users_active{otel_scope_name=\"test\"} 27.0\n" + + "# EOF\n"; + assertThat(toOpenMetrics(snapshots)).isEqualTo(expected); + } + + @Test + void longGaugeComplete() throws IOException { + meter + .gaugeBuilder("temperature") + .setUnit("Cel") + .setDescription("help text") + .ofLongs() + .buildWithCallback( + m -> { + m.record(23, Attributes.builder().put("location", "inside").build()); + m.record(17, Attributes.builder().put("location", "outside").build()); + }); + assertGaugeComplete(reader.collect()); + } + + @Test + void doubleGaugeComplete() throws IOException { + meter + .gaugeBuilder("temperature") + .setUnit("Cel") + .setDescription("help text") + .buildWithCallback( + m -> { + m.record(23.0, Attributes.builder().put("location", "inside").build()); + m.record(17.0, Attributes.builder().put("location", "outside").build()); + }); + assertGaugeComplete(reader.collect()); + } + + private static void assertGaugeComplete(MetricSnapshots snapshots) throws IOException { + String expected = + "" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# TYPE temperature_celsius gauge\n" + + "# UNIT temperature_celsius celsius\n" + + "# HELP temperature_celsius help text\n" + + "temperature_celsius{location=\"inside\",otel_scope_name=\"test\"} 23.0\n" + + "temperature_celsius{location=\"outside\",otel_scope_name=\"test\"} 17.0\n" + + "# EOF\n"; + assertThat(toOpenMetrics(snapshots)).isEqualTo(expected); + } + + @Test + void longGaugeMinimal() throws IOException { + meter.gaugeBuilder("my_gauge").ofLongs().buildWithCallback(m -> m.record(2)); + assertGaugeMinimal(reader.collect()); + } + + @Test + void doubleGaugeMinimal() throws IOException { + meter.gaugeBuilder("my_gauge").buildWithCallback(m -> m.record(2.0)); + assertGaugeMinimal(reader.collect()); + } + + private static void assertGaugeMinimal(MetricSnapshots snapshots) throws IOException { + String expected = + "" + + "# TYPE 
my_gauge gauge\n" + + "my_gauge{otel_scope_name=\"test\"} 2.0\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertThat(toOpenMetrics(snapshots)).isEqualTo(expected); + } + + @Test + void longHistogramComplete() throws IOException { + LongHistogram histogram = + meter + .histogramBuilder("request.size") + .setDescription("some help text") + .setUnit("By") + .ofLongs() + .build(); + Span span1 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span1.makeCurrent()) { + histogram.record(173, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + Span span2 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span2.makeCurrent()) { + histogram.record(400, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + Span span3 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span3.makeCurrent()) { + histogram.record(204, Attributes.builder().put("animal", "mouse").build()); + } finally { + span3.end(); + } + assertHistogramComplete(reader.collect(), span1, span2, span3); + } + + @Test + void doubleHistogramComplete() throws IOException { + DoubleHistogram histogram = + meter + .histogramBuilder("request.size") + .setDescription("some help text") + .setUnit("By") + .build(); + Span span1 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span1.makeCurrent()) { + histogram.record(173.0, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + Span span2 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span2.makeCurrent()) { + histogram.record(400.0, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + Span span3 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span3.makeCurrent()) { + histogram.record(204.0, Attributes.builder().put("animal", "mouse").build()); + } finally { + span3.end(); + } + assertHistogramComplete(reader.collect(), span1, span2, span3); + } + + private void assertHistogramComplete( + MetricSnapshots snapshots, Span span1, Span span2, Span span3) throws IOException { + String expected = + "" + + "# TYPE request_size_bytes histogram\n" + + "# UNIT request_size_bytes bytes\n" + + "# HELP request_size_bytes some help text\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"0.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"5.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"10.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"25.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"50.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"75.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"100.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"250.0\"} 1 # {span_id=\"" + + span1.getSpanContext().getSpanId() + + "\",trace_id=\"" + + span1.getSpanContext().getTraceId() + + "\"} 173.0 \n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"500.0\"} 2 # {span_id=\"" + + span2.getSpanContext().getSpanId() + + "\",trace_id=\"" + + span2.getSpanContext().getTraceId() + + "\"} 400.0 \n" + + 
"request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"750.0\"} 2\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"1000.0\"} 2\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"2500.0\"} 2\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"5000.0\"} 2\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"7500.0\"} 2\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"10000.0\"} 2\n" + + "request_size_bytes_bucket{animal=\"bear\",otel_scope_name=\"test\",le=\"+Inf\"} 2\n" + + "request_size_bytes_count{animal=\"bear\",otel_scope_name=\"test\"} 2 # {span_id=\"" + + "" + + "\",trace_id=\"" + + "" + + "\"} \n" + + "request_size_bytes_sum{animal=\"bear\",otel_scope_name=\"test\"} 573.0\n" + + "request_size_bytes_created{animal=\"bear\",otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"0.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"5.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"10.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"25.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"50.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"75.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"100.0\"} 0\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"250.0\"} 1 # {span_id=\"" + + span3.getSpanContext().getSpanId() + + "\",trace_id=\"" + + span3.getSpanContext().getTraceId() + + "\"} 204.0 \n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"500.0\"} 1\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"750.0\"} 1\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"1000.0\"} 1\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"2500.0\"} 1\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"5000.0\"} 1\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"7500.0\"} 1\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"10000.0\"} 1\n" + + "request_size_bytes_bucket{animal=\"mouse\",otel_scope_name=\"test\",le=\"+Inf\"} 1\n" + + "request_size_bytes_count{animal=\"mouse\",otel_scope_name=\"test\"} 1 # {span_id=\"" + + span3.getSpanContext().getSpanId() + + "\",trace_id=\"" + + span3.getSpanContext().getTraceId() + + "\"} 204.0 \n" + + "request_size_bytes_sum{animal=\"mouse\",otel_scope_name=\"test\"} 204.0\n" + + "request_size_bytes_created{animal=\"mouse\",otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertMatches(expected, toOpenMetrics(snapshots)); + } + + @Test + void longHistogramMinimal() throws IOException { + LongHistogram histogram = meter.histogramBuilder("request.size").ofLongs().build(); + histogram.record(173); + histogram.record(173); + histogram.record(100_000); + assertHistogramMinimal(reader.collect()); + } + + @Test + void doubleHistogramMinimal() throws IOException { 
+ DoubleHistogram histogram = meter.histogramBuilder("request.size").build(); + histogram.record(173.0); + histogram.record(173.0); + histogram.record(100_000.0); + assertHistogramMinimal(reader.collect()); + } + + private void assertHistogramMinimal(MetricSnapshots snapshots) throws IOException { + String expected = + "" + + "# TYPE request_size histogram\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"0.0\"} 0\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"5.0\"} 0\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"10.0\"} 0\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"25.0\"} 0\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"50.0\"} 0\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"75.0\"} 0\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"100.0\"} 0\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"250.0\"} 2\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"500.0\"} 2\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"750.0\"} 2\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"1000.0\"} 2\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"2500.0\"} 2\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"5000.0\"} 2\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"7500.0\"} 2\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"10000.0\"} 2\n" + + "request_size_bucket{otel_scope_name=\"test\",le=\"+Inf\"} 3\n" + + "request_size_count{otel_scope_name=\"test\"} 3\n" + + "request_size_sum{otel_scope_name=\"test\"} 100346.0\n" + + "request_size_created{otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertThat(toOpenMetrics(snapshots)).isEqualTo(expected); + } + + @Test + @Disabled("disabled until #6010 is fixed") + void exponentialLongHistogramComplete() { + LongHistogram histogram = + meter + .histogramBuilder("my.exponential.histogram") + .setDescription("some help text") + .setUnit("By") + .ofLongs() + .build(); + Span span1 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span1.makeCurrent()) { + histogram.record(7, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + histogram.record(0, Attributes.builder().put("animal", "bear").build()); + Span span2 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span2.makeCurrent()) { + histogram.record(3, Attributes.builder().put("animal", "mouse").build()); + } finally { + span2.end(); + } + assertExponentialHistogramComplete(reader.collect(), span1, span2); + } + + @Test + void exponentialDoubleHistogramComplete() { + DoubleHistogram histogram = + meter + .histogramBuilder("my.exponential.histogram") + .setDescription("some help text") + .setUnit("By") + .build(); + Span span1 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span1.makeCurrent()) { + histogram.record(7.0, Attributes.builder().put("animal", "bear").build()); + } finally { + span1.end(); + } + histogram.record(0.0, Attributes.builder().put("animal", "bear").build()); + Span span2 = tracer.spanBuilder("test").startSpan(); + try (Scope ignored = span2.makeCurrent()) { + histogram.record(3.0, Attributes.builder().put("animal", "mouse").build()); + } finally { + span2.end(); + } + assertExponentialHistogramComplete(reader.collect(), span1, span2); + } + + private 
static void assertExponentialHistogramComplete( + MetricSnapshots snapshots, Span span1, Span span2) { + String expected = + "" + + "name: \"my_exponential_histogram_bytes\"\n" + + "help: \"some help text\"\n" + + "type: HISTOGRAM\n" + + "metric {\n" + + " label {\n" + + " name: \"animal\"\n" + + " value: \"bear\"\n" + + " }\n" + + " label {\n" + + " name: \"otel_scope_name\"\n" + + " value: \"test\"\n" + + " }\n" + + " histogram {\n" + + " sample_count: 2\n" + + " sample_sum: 7.0\n" + + " bucket {\n" + + " cumulative_count: 2\n" + + " upper_bound: Infinity\n" + + " exemplar {\n" + + " label {\n" + + " name: \"span_id\"\n" + + " value: \"" + + span1.getSpanContext().getSpanId() + + "\"\n" + + " }\n" + + " label {\n" + + " name: \"trace_id\"\n" + + " value: \"" + + span1.getSpanContext().getTraceId() + + "\"\n" + + " }\n" + + " value: 7.0\n" + + " timestamp {\n" + + " seconds: \n" + + " \n" + + " }\n" + + " }\n" + + " }\n" + + " schema: 8\n" + + " zero_threshold: 0.0\n" + + " zero_count: 1\n" + + " positive_span {\n" + + " offset: 719\n" + + " length: 1\n" + + " }\n" + + " positive_delta: 1\n" + + " }\n" + + "}\n" + + "metric {\n" + + " label {\n" + + " name: \"animal\"\n" + + " value: \"mouse\"\n" + + " }\n" + + " label {\n" + + " name: \"otel_scope_name\"\n" + + " value: \"test\"\n" + + " }\n" + + " histogram {\n" + + " sample_count: 1\n" + + " sample_sum: 3.0\n" + + " bucket {\n" + + " cumulative_count: 1\n" + + " upper_bound: Infinity\n" + + " exemplar {\n" + + " label {\n" + + " name: \"span_id\"\n" + + " value: \"" + + span2.getSpanContext().getSpanId() + + "\"\n" + + " }\n" + + " label {\n" + + " name: \"trace_id\"\n" + + " value: \"" + + span2.getSpanContext().getTraceId() + + "\"\n" + + " }\n" + + " value: 3.0\n" + + " timestamp {\n" + + " seconds: \n" + + " \n" + + " }\n" + + " }\n" + + " }\n" + + " schema: 8\n" + + " zero_threshold: 0.0\n" + + " zero_count: 0\n" + + " positive_span {\n" + + " offset: 406\n" + + " length: 1\n" + + " }\n" + + " positive_delta: 1\n" + + " }\n" + + "}\n" + + "name: \"target_info\"\n" + + "type: GAUGE\n" + + "metric {\n" + + " label {\n" + + " name: \"service_name\"\n" + + " value: \"unknown_service:java\"\n" + + " }\n" + + " label {\n" + + " name: \"telemetry_sdk_language\"\n" + + " value: \"java\"\n" + + " }\n" + + " label {\n" + + " name: \"telemetry_sdk_name\"\n" + + " value: \"opentelemetry\"\n" + + " }\n" + + " label {\n" + + " name: \"telemetry_sdk_version\"\n" + + " value: \"1.x.x\"\n" + + " }\n" + + " gauge {\n" + + " value: 1.0\n" + + " }\n" + + "}\n"; + assertMatches(expected, toPrometheusProtobuf(snapshots)); + } + + @Test + void exponentialLongHistogramMinimal() { + LongHistogram histogram = meter.histogramBuilder("my.exponential.histogram").ofLongs().build(); + histogram.record(1, Attributes.builder().put("animal", "bear").build()); + assertExponentialHistogramMinimal(reader.collect()); + } + + @Test + void exponentialDoubleHistogramMinimal() { + DoubleHistogram histogram = meter.histogramBuilder("my.exponential.histogram").build(); + histogram.record(1.0, Attributes.builder().put("animal", "bear").build()); + assertExponentialHistogramMinimal(reader.collect()); + } + + private static void assertExponentialHistogramMinimal(MetricSnapshots snapshots) { + String expected = + "" + + "name: \"my_exponential_histogram\"\n" + + "help: \"\"\n" + + "type: HISTOGRAM\n" + + "metric {\n" + + " label {\n" + + " name: \"animal\"\n" + + " value: \"bear\"\n" + + " }\n" + + " label {\n" + + " name: \"otel_scope_name\"\n" + + " value: \"test\"\n" + + " 
}\n" + + " histogram {\n" + + " sample_count: 1\n" + + " sample_sum: 1.0\n" + + " schema: 8\n" + + " zero_threshold: 0.0\n" + + " zero_count: 0\n" + + " positive_span {\n" + + " offset: 0\n" + + " length: 1\n" + + " }\n" + + " positive_delta: 1\n" + + " }\n" + + "}\n" + + "name: \"target_info\"\n" + + "type: GAUGE\n" + + "metric {\n" + + " label {\n" + + " name: \"service_name\"\n" + + " value: \"unknown_service:java\"\n" + + " }\n" + + " label {\n" + + " name: \"telemetry_sdk_language\"\n" + + " value: \"java\"\n" + + " }\n" + + " label {\n" + + " name: \"telemetry_sdk_name\"\n" + + " value: \"opentelemetry\"\n" + + " }\n" + + " label {\n" + + " name: \"telemetry_sdk_version\"\n" + + " value: \"1.x.x\"\n" + + " }\n" + + " gauge {\n" + + " value: 1.0\n" + + " }\n" + + "}\n"; + assertMatches(expected, toPrometheusProtobuf(snapshots)); + } + + @Test + void exponentialHistogramBucketConversion() { + Random random = new Random(); + for (int i = 0; i < 100_000; i++) { + int otelScale = random.nextInt(24) - 4; + int prometheusScale = Math.min(otelScale, 8); + PrometheusMetricReader reader = + new PrometheusMetricReader(true, /* allowedResourceAttributesFilter= */ null); + Meter meter = + SdkMeterProvider.builder() + .registerMetricReader(reader) + .registerView( + InstrumentSelector.builder().setName("my.exponential.histogram").build(), + View.builder() + .setAggregation(Aggregation.base2ExponentialBucketHistogram(160, otelScale)) + .build()) + .build() + .meterBuilder("test") + .build(); + int orderOfMagnitude = random.nextInt(18) - 9; + double observation = random.nextDouble() * Math.pow(10, orderOfMagnitude); + if (observation == 0) { + continue; + } + DoubleHistogram histogram = meter.histogramBuilder("my.exponential.histogram").build(); + histogram.record(observation); + MetricSnapshots snapshots = reader.collect(); + HistogramSnapshot snapshot = (HistogramSnapshot) snapshots.get(0); + HistogramDataPointSnapshot dataPoint = snapshot.getDataPoints().get(0); + assertThat(dataPoint.getNativeSchema()).isEqualTo(prometheusScale); + NativeHistogramBuckets buckets = dataPoint.getNativeBucketsForPositiveValues(); + assertThat(buckets.size()).isEqualTo(1); + int index = buckets.getBucketIndex(0); + double base = Math.pow(2, Math.pow(2, -prometheusScale)); + double lowerBound = Math.pow(base, index - 1); + double upperBound = Math.pow(base, index); + assertThat(lowerBound).isLessThan(observation); + assertThat(upperBound).isGreaterThanOrEqualTo(observation); + } + } + + @Test + void exponentialLongHistogramScaleDown() { + // The following histogram will have the default scale, which is 20. + DoubleHistogram histogram = meter.histogramBuilder("my.exponential.histogram").build(); + double base = Math.pow(2, Math.pow(2, -20)); + int i; + for (i = 0; i < Math.pow(2, 12); i++) { + histogram.record(Math.pow(base, i)); // one observation per bucket + } + for (int j = 0; j < 10; j++) { + histogram.record(Math.pow(base, i + 2 * j)); // few empty buckets between the observations + } + MetricSnapshots snapshots = reader.collect(); + HistogramSnapshot snapshot = (HistogramSnapshot) snapshots.get(0); + HistogramDataPointSnapshot dataPoint = snapshot.getDataPoints().get(0); + assertThat(dataPoint.getNativeSchema()).isEqualTo(8); // scaled down from 20 to 8. 
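+    // Downscaling from scale 20 to 8 merges 2^(20-8) = 4096 adjacent scale-20 buckets into each
+    // scale-8 bucket, which is why the one-observation-per-bucket recordings above collapse into
+    // only three native buckets below.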
+ NativeHistogramBuckets buckets = dataPoint.getNativeBucketsForPositiveValues(); + assertThat(buckets.size()).isEqualTo(3); + // In bucket 0 we have exactly one observation: the value 1.0 + assertThat(buckets.getBucketIndex(0)).isEqualTo(0); + assertThat(buckets.getCount(0)).isEqualTo(1); + // In bucket 1 we have 4095 observations + assertThat(buckets.getBucketIndex(1)).isEqualTo(1); + assertThat(buckets.getCount(1)).isEqualTo(4095); + // In bucket 2 we have 10 observations (despite the empty buckets all observations fall into the + // same bucket at scale 8) + assertThat(buckets.getBucketIndex(2)).isEqualTo(2); + assertThat(buckets.getCount(2)).isEqualTo(10); + } + + @Test + void instrumentationScope() throws IOException { + SdkMeterProvider meterProvider = + SdkMeterProvider.builder() + .setClock(testClock) + .registerMetricReader(this.reader) + .setResource( + Resource.getDefault().toBuilder().put("telemetry.sdk.version", "1.x.x").build()) + .build(); + Meter meter1 = meterProvider.meterBuilder("scopeA").setInstrumentationVersion("1.1").build(); + Meter meter2 = meterProvider.meterBuilder("scopeB").setInstrumentationVersion("1.2").build(); + meter1 + .counterBuilder("processing.time") + .setDescription("processing time in seconds") + .setUnit("s") + .ofDoubles() + .build() + .add(3.3, Attributes.builder().put("a", "b").build()); + meter2 + .counterBuilder("processing.time") + .setDescription("processing time in seconds") + .setUnit("s") + .ofDoubles() + .build() + .add(3.3, Attributes.builder().put("a", "b").build()); + String expected = + "" + + "# TYPE processing_time_seconds counter\n" + + "# UNIT processing_time_seconds seconds\n" + + "# HELP processing_time_seconds processing time in seconds\n" + + "processing_time_seconds_total{a=\"b\",otel_scope_name=\"scopeA\",otel_scope_version=\"1.1\"} 3.3\n" + + "processing_time_seconds_created{a=\"b\",otel_scope_name=\"scopeA\",otel_scope_version=\"1.1\"} " + + createdTimestamp + + "\n" + + "processing_time_seconds_total{a=\"b\",otel_scope_name=\"scopeB\",otel_scope_version=\"1.2\"} 3.3\n" + + "processing_time_seconds_created{a=\"b\",otel_scope_name=\"scopeB\",otel_scope_version=\"1.2\"} " + + createdTimestamp + + "\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertThat(toOpenMetrics(reader.collect())).isEqualTo(expected); + } + + @Test + void nameSuffix() throws IOException { + LongCounter unitAndTotal = + meter.counterBuilder("request.duration.seconds.total").setUnit("s").build(); + unitAndTotal.add(1); + LongCounter unitOnly = meter.counterBuilder("response.duration.seconds").setUnit("s").build(); + unitOnly.add(2); + LongCounter totalOnly = meter.counterBuilder("processing.duration.total").setUnit("s").build(); + totalOnly.add(3); + LongCounter noSuffix = meter.counterBuilder("queue.time").setUnit("s").build(); + noSuffix.add(4); + String expected = + "" + + "# TYPE processing_duration_seconds counter\n" + + "# UNIT processing_duration_seconds seconds\n" + + "processing_duration_seconds_total{otel_scope_name=\"test\"} 3.0\n" + + "processing_duration_seconds_created{otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE queue_time_seconds counter\n" + + "# UNIT queue_time_seconds seconds\n" + + "queue_time_seconds_total{otel_scope_name=\"test\"} 4.0\n" + + "queue_time_seconds_created{otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE 
request_duration_seconds counter\n" + + "# UNIT request_duration_seconds seconds\n" + + "request_duration_seconds_total{otel_scope_name=\"test\"} 1.0\n" + + "request_duration_seconds_created{otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE response_duration_seconds counter\n" + + "# UNIT response_duration_seconds seconds\n" + + "response_duration_seconds_total{otel_scope_name=\"test\"} 2.0\n" + + "response_duration_seconds_created{otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertThat(toOpenMetrics(reader.collect())).isEqualTo(expected); + } + + @Test + void nameSuffixUnit() throws IOException { + LongCounter counter = meter.counterBuilder("request.duration.seconds").setUnit("s").build(); + counter.add(1); + String expected = + "" + + "# TYPE request_duration_seconds counter\n" + + "# UNIT request_duration_seconds seconds\n" + + "request_duration_seconds_total{otel_scope_name=\"test\"} 1.0\n" + + "request_duration_seconds_created{otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertThat(toOpenMetrics(reader.collect())).isEqualTo(expected); + } + + @Test + void illegalCharacters() throws IOException { + LongCounter counter = meter.counterBuilder("prod/request.count").build(); + counter.add(1, Attributes.builder().put("user-count", 30).build()); + String expected = + "" + + "# TYPE prod_request_count counter\n" + + "prod_request_count_total{otel_scope_name=\"test\",user_count=\"30\"} 1.0\n" + + "prod_request_count_created{otel_scope_name=\"test\",user_count=\"30\"} " + + createdTimestamp + + "\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertThat(toOpenMetrics(reader.collect())).isEqualTo(expected); + } + + @Test + void createdTimestamp() throws IOException { + + LongCounter counter = meter.counterBuilder("requests").build(); + testClock.advance(Duration.ofMillis(1)); + counter.add(3, Attributes.builder().put("animal", "bear").build()); + testClock.advance(Duration.ofMillis(1)); + counter.add(2, Attributes.builder().put("animal", "mouse").build()); + testClock.advance(Duration.ofMillis(1)); + + // There is a curious difference between Prometheus and OpenTelemetry: + // In Prometheus metrics the _created timestamp is per data point, + // i.e. the _created timestamp says when this specific set of label values + // was first observed. + // In the OTel Java SDK the _created timestamp is the initialization time + // of the SdkMeterProvider, i.e. all data points will have the same _created timestamp. + // So we expect the _created timestamp to be the start time of the application, + // not the timestamp when the counter or an individual data point was created. 
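+    // (Concretely: the two data points below are recorded 1ms and 2ms after setUp(), yet both
+    // requests_created samples carry the same createdTimestamp captured before the clock advanced.)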
+ String expected = + "" + + "# TYPE requests counter\n" + + "requests_total{animal=\"bear\",otel_scope_name=\"test\"} 3.0\n" + + "requests_created{animal=\"bear\",otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "requests_total{animal=\"mouse\",otel_scope_name=\"test\"} 2.0\n" + + "requests_created{animal=\"mouse\",otel_scope_name=\"test\"} " + + createdTimestamp + + "\n" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# EOF\n"; + assertThat(toOpenMetrics(reader.collect())).isEqualTo(expected); + } + + @Test + void otelScopeComplete() throws IOException { + // There is currently no API for adding scope attributes. + // However, we can at least test the otel_scope_version attribute. + Meter meter = + SdkMeterProvider.builder() + .setClock(testClock) + .registerMetricReader(this.reader) + .setResource( + Resource.getDefault().toBuilder().put("telemetry.sdk.version", "1.x.x").build()) + .build() + .meterBuilder("test-scope") + .setInstrumentationVersion("a.b.c") + .build(); + LongCounter counter = meter.counterBuilder("test.count").build(); + counter.add(1); + String expected = + "" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# TYPE test_count counter\n" + + "test_count_total{otel_scope_name=\"test-scope\",otel_scope_version=\"a.b.c\"} 1.0\n" + + "test_count_created{otel_scope_name=\"test-scope\",otel_scope_version=\"a.b.c\"} " + + createdTimestamp + + "\n" + + "# EOF\n"; + assertThat(toOpenMetrics(reader.collect())).isEqualTo(expected); + } + + @Test + void otelScopeDisabled() throws IOException { + PrometheusMetricReader reader = + new PrometheusMetricReader(false, /* allowedResourceAttributesFilter= */ null); + Meter meter = + SdkMeterProvider.builder() + .setClock(testClock) + .registerMetricReader(reader) + .setResource( + Resource.getDefault().toBuilder().put("telemetry.sdk.version", "1.x.x").build()) + .build() + .meterBuilder("test-scope") + .setInstrumentationVersion("a.b.c") + .build(); + LongCounter counter = meter.counterBuilder("test.count").build(); + counter.add(1); + String expected = + "" + + "# TYPE target info\n" + + "target_info{service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# TYPE test_count counter\n" + + "test_count_total 1.0\n" + + "test_count_created " + + createdTimestamp + + "\n" + + "# EOF\n"; + assertThat(toOpenMetrics(reader.collect())).isEqualTo(expected); + } + + @SuppressWarnings("resource") + @Test + void addResourceAttributesWorks() throws IOException { + PrometheusMetricReader reader = + new PrometheusMetricReader( + true, /* allowedResourceAttributesFilter= */ Predicates.is("cluster")); + Meter meter = + SdkMeterProvider.builder() + .setClock(testClock) + .registerMetricReader(reader) + .setResource( + Resource.getDefault().toBuilder() + .put("cluster", "my.cluster") + .put("telemetry.sdk.version", "1.x.x") + .build()) + .build() + .meterBuilder("test-scope") + .setInstrumentationVersion("a.b.c") + .build(); + LongCounter counter = meter.counterBuilder("test.count").build(); + counter.add(1); + String expected = + "" + + "# TYPE target info\n" + + 
"target_info{cluster=\"my.cluster\",service_name=\"unknown_service:java\",telemetry_sdk_language=\"java\",telemetry_sdk_name=\"opentelemetry\",telemetry_sdk_version=\"1.x.x\"} 1\n" + + "# TYPE test_count counter\n" + + // In both those metrics we expect the "cluster" label to exist + + "test_count_total{cluster=\"my.cluster\",otel_scope_name=\"test-scope\",otel_scope_version=\"a.b.c\"} 1.0\n" + + "test_count_created{cluster=\"my.cluster\",otel_scope_name=\"test-scope\",otel_scope_version=\"a.b.c\"} " + + createdTimestamp + + "\n" + + "# EOF\n"; + assertThat(toOpenMetrics(reader.collect())).isEqualTo(expected); + } + + /** + * Unfortunately there is no easy way to use {@link TestClock} for Exemplar timestamps. Test if + * {@code expected} equals {@code actual} but {@code } matches arbitrary timestamps. + */ + private static void assertMatches(String expected, String actual) { + String regex = toPattern(expected); + assertThat(actual) + .as("Expected: " + expected + "\nActual: " + actual) + .matches(Pattern.compile(regex)); + } + + /** + * Replace non-deterministic portions of {@code expected} with regex patterns. Other portions are + * quoted such that must match exactly. The following sequences are replaced: + * + *

+   * <ul>
+   *   <li>{@code <timestamp>}
+   *   <li>{@code <spanId>}
+   *   <li>{@code <traceId>}
+   *   <li>{@code <measurement>}
+   * </ul>
+ */ + private static String toPattern(String expected) { + Map replacePatterns = new HashMap<>(); + String timestampPattern = "[0-9]+(\\.[0-9]+)?"; + replacePatterns.put("timestamp", timestampPattern); + replacePatterns.put("maybeNanos", String.format("(nanos: %s)?", timestampPattern)); + replacePatterns.put("spanId", "[a-z0-9]*"); + replacePatterns.put("traceId", "[a-z0-9]*"); + replacePatterns.put("measurement", "[0-9\\.]*"); + + Matcher matcher = Pattern.compile("\\<([a-zA-Z]*)\\>").matcher(expected); + if (!matcher.find()) { + return Pattern.quote(expected); + } + int offset = 0; + StringBuilder regexBuilder = new StringBuilder(); + do { + MatchResult matchResult = matcher.toMatchResult(); + String key = matchResult.group(1); + String pattern = replacePatterns.getOrDefault(key, key); + regexBuilder + .append(Pattern.quote(expected.substring(offset, matchResult.start()))) + .append(pattern); + offset = matchResult.end(); + } while (matcher.find()); + if (offset != expected.length()) { + regexBuilder.append(Pattern.quote(expected.substring(offset))); + } + return regexBuilder.toString(); + } + + private static String toOpenMetrics(MetricSnapshots snapshots) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); + writer.write(out, snapshots); + return out.toString(StandardCharsets.UTF_8.name()); + } + + private static String toPrometheusProtobuf(MetricSnapshots snapshots) { + PrometheusProtobufWriter writer = new PrometheusProtobufWriter(); + return writer.toDebugString(snapshots); + } + + private static String convertTimestamp(long nanoTime) { + String millis = Long.toString(TimeUnit.NANOSECONDS.toMillis(nanoTime)); + return millis.substring(0, millis.length() - 3) + "." 
+ millis.substring(millis.length() - 3); + } +} diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusUnitsHelperTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusUnitsHelperTest.java index 2a8d01f0ce3..658642c024a 100644 --- a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusUnitsHelperTest.java +++ b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/PrometheusUnitsHelperTest.java @@ -6,7 +6,9 @@ package io.opentelemetry.exporter.prometheus; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import io.prometheus.metrics.model.snapshots.Unit; import java.util.stream.Stream; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; @@ -16,8 +18,13 @@ class PrometheusUnitsHelperTest { @ParameterizedTest @MethodSource("providePrometheusOTelUnitEquivalentPairs") - public void testPrometheusUnitEquivalency(String otlpUnit, String prometheusUnit) { - assertEquals(prometheusUnit, PrometheusUnitsHelper.getEquivalentPrometheusUnit(otlpUnit)); + public void testPrometheusUnitEquivalency(String otlpUnit, String expectedPrometheusUnit) { + Unit actualPrometheusUnit = PrometheusUnitsHelper.convertUnit(otlpUnit); + if (expectedPrometheusUnit == null) { + assertNull(actualPrometheusUnit); + } else { + assertEquals(expectedPrometheusUnit, actualPrometheusUnit.toString()); + } } private static Stream providePrometheusOTelUnitEquivalentPairs() { @@ -63,46 +70,34 @@ private static Stream providePrometheusOTelUnitEquivalentPairs() { // Unit not found - Case sensitive Arguments.of("S", "S"), // Special case - 1 - Arguments.of("1", ""), + Arguments.of("1", "ratio"), // Special Case - Drop metric units in {} - Arguments.of("{packets}", ""), + Arguments.of("{packets}", null), // Special Case - Dropped metric units only in {} Arguments.of("{packets}V", "volts"), // Special Case - Dropped metric units with 'per' unit handling applicable - Arguments.of("{scanned}/{returned}", ""), + Arguments.of("{scanned}/{returned}", null), // Special Case - Dropped metric units with 'per' unit handling applicable Arguments.of("{objects}/s", "per_second"), // Units expressing rate - 'per' units, both units expanded Arguments.of("m/s", "meters_per_second"), // Units expressing rate - per minute - Arguments.of("m/m", "meters_per_minute"), + Arguments.of("m/min", "meters_per_minute"), // Units expressing rate - per day Arguments.of("A/d", "amperes_per_day"), // Units expressing rate - per week - Arguments.of("W/w", "watts_per_week"), + Arguments.of("W/wk", "watts_per_week"), // Units expressing rate - per month Arguments.of("J/mo", "joules_per_month"), // Units expressing rate - per year - Arguments.of("TBy/y", "terabytes_per_year"), + Arguments.of("TBy/a", "terabytes_per_year"), // Units expressing rate - 'per' units, both units unknown Arguments.of("v/v", "v_per_v"), // Units expressing rate - 'per' units, first unit unknown Arguments.of("km/h", "km_per_hour"), // Units expressing rate - 'per' units, 'per' unit unknown - Arguments.of("g/g", "grams_per_g"), + Arguments.of("g/x", "grams_per_x"), // Misc - unit containing known abbreviations improperly formatted - Arguments.of("watts_W", "watts_W"), - // Unsupported symbols - Arguments.of("°F", "F"), - // Unsupported symbols - multiple - Arguments.of("unit+=.:,!* & #unused", "unit_unused"), - // Unsupported symbols - 'per' units - 
Arguments.of("__test $/°C", "test_per_C"), - // Unsupported symbols - whitespace - Arguments.of("\t", ""), - // Null unit - Arguments.of(null, null), - // Misc - unit cleanup - no case match special char - Arguments.of("$1000", "1000")); + Arguments.of("watts_W", "watts_W")); } } diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/SerializerTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/SerializerTest.java deleted file mode 100644 index 13bc2752ce5..00000000000 --- a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/SerializerTest.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.prometheus; - -import static io.opentelemetry.exporter.prometheus.TestConstants.CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES; -import static io.opentelemetry.exporter.prometheus.TestConstants.CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE; -import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_DOUBLE_SUM; -import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_HISTOGRAM; -import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_LONG_SUM; -import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE; -import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES; -import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE_NO_ATTRIBUTES; -import static io.opentelemetry.exporter.prometheus.TestConstants.LONG_GAUGE; -import static io.opentelemetry.exporter.prometheus.TestConstants.MONOTONIC_CUMULATIVE_DOUBLE_SUM; -import static io.opentelemetry.exporter.prometheus.TestConstants.MONOTONIC_CUMULATIVE_DOUBLE_SUM_WITH_SUFFIX_TOTAL; -import static io.opentelemetry.exporter.prometheus.TestConstants.MONOTONIC_CUMULATIVE_LONG_SUM; -import static io.opentelemetry.exporter.prometheus.TestConstants.NON_MONOTONIC_CUMULATIVE_DOUBLE_SUM; -import static io.opentelemetry.exporter.prometheus.TestConstants.NON_MONOTONIC_CUMULATIVE_LONG_SUM; -import static io.opentelemetry.exporter.prometheus.TestConstants.SUMMARY; -import static org.assertj.core.api.Assertions.assertThat; - -import io.opentelemetry.sdk.metrics.data.MetricData; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.Arrays; -import org.junit.jupiter.api.Test; - -class SerializerTest { - - @Test - void prometheus004() { - // Same output as prometheus client library except for these changes which are compatible with - // Prometheus - // TYPE / HELP line order reversed - // Attributes do not end in trailing comma - assertThat( - serialize004( - MONOTONIC_CUMULATIVE_DOUBLE_SUM, - MONOTONIC_CUMULATIVE_DOUBLE_SUM_WITH_SUFFIX_TOTAL, - NON_MONOTONIC_CUMULATIVE_DOUBLE_SUM, - DELTA_DOUBLE_SUM, // Deltas are dropped - MONOTONIC_CUMULATIVE_LONG_SUM, - NON_MONOTONIC_CUMULATIVE_LONG_SUM, - DELTA_LONG_SUM, // Deltas are dropped - DOUBLE_GAUGE, - LONG_GAUGE, - SUMMARY, - DELTA_HISTOGRAM, // Deltas are dropped - CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES, - CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE, - DOUBLE_GAUGE_NO_ATTRIBUTES, - DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES)) - .isEqualTo( - "# TYPE target info\n" - + "# HELP target Target metadata\n" - + "target_info{kr=\"vr\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + 
"otel_scope_info{otel_scope_name=\"full\",otel_scope_version=\"version\",ks=\"vs\"} 1\n" - + "# TYPE monotonic_cumulative_double_sum_seconds_total counter\n" - + "# HELP monotonic_cumulative_double_sum_seconds_total description\n" - + "monotonic_cumulative_double_sum_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcds\"} 5.0 1633950672000\n" - + "# TYPE monotonic_cumulative_double_sum_suffix_seconds_total counter\n" - + "# HELP monotonic_cumulative_double_sum_suffix_seconds_total description\n" - + "monotonic_cumulative_double_sum_suffix_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcds\"} 5.0 1633950672000\n" - + "# TYPE non_monotonic_cumulative_double_sum_seconds gauge\n" - + "# HELP non_monotonic_cumulative_double_sum_seconds description\n" - + "non_monotonic_cumulative_double_sum_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"nmcds\"} 5.0 1633950672000\n" - + "# TYPE monotonic_cumulative_long_sum_seconds_total counter\n" - + "# HELP monotonic_cumulative_long_sum_seconds_total unused\n" - + "monotonic_cumulative_long_sum_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcls\"} 5.0 1633950672000\n" - + "# TYPE non_monotonic_cumulative_long_sum_seconds gauge\n" - + "# HELP non_monotonic_cumulative_long_sum_seconds unused\n" - + "non_monotonic_cumulative_long_sum_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"nmcls\"} 5.0 1633950672000\n" - + "# TYPE double_gauge_seconds gauge\n" - + "# HELP double_gauge_seconds unused\n" - + "double_gauge_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"dg\"} 5.0 1633950672000\n" - + "# TYPE long_gauge_seconds gauge\n" - + "# HELP long_gauge_seconds unused\n" - + "long_gauge_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"lg\"} 5.0 1633950672000\n" - + "# TYPE summary_seconds summary\n" - + "# HELP summary_seconds unused\n" - + "summary_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\"} 5.0 1633950672000\n" - + "summary_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\"} 7.0 1633950672000\n" - + "summary_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\",quantile=\"0.9\"} 0.1 1633950672000\n" - + "summary_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\",quantile=\"0.99\"} 0.3 1633950672000\n" - + "# TYPE cumulative_histogram_no_attributes_seconds histogram\n" - + "# HELP cumulative_histogram_no_attributes_seconds unused\n" - + "cumulative_histogram_no_attributes_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\"} 2.0 1633950672000\n" - + "cumulative_histogram_no_attributes_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\"} 1.0 1633950672000\n" - + "cumulative_histogram_no_attributes_seconds_bucket{otel_scope_name=\"full\",otel_scope_version=\"version\",le=\"+Inf\"} 2.0 1633950672000\n" - + "# TYPE cumulative_histogram_single_attribute_seconds histogram\n" - + "# HELP cumulative_histogram_single_attribute_seconds unused\n" - + "cumulative_histogram_single_attribute_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\"} 2.0 1633950672000\n" - + "cumulative_histogram_single_attribute_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\"} 1.0 1633950672000\n" - + 
"cumulative_histogram_single_attribute_seconds_bucket{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\",le=\"+Inf\"} 2.0 1633950672000\n" - + "# TYPE double_gauge_no_attributes_seconds gauge\n" - + "# HELP double_gauge_no_attributes_seconds unused\n" - + "double_gauge_no_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\"} 7.0 1633950672000\n" - + "# TYPE double_gauge_multiple_attributes_seconds gauge\n" - + "# HELP double_gauge_multiple_attributes_seconds unused\n" - + "double_gauge_multiple_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",animal=\"bear\",type=\"dgma\"} 8.0 1633950672000\n"); - } - - @Test - void openMetrics() { - assertThat( - serializeOpenMetrics( - MONOTONIC_CUMULATIVE_DOUBLE_SUM, - MONOTONIC_CUMULATIVE_DOUBLE_SUM_WITH_SUFFIX_TOTAL, - NON_MONOTONIC_CUMULATIVE_DOUBLE_SUM, - DELTA_DOUBLE_SUM, // Deltas are dropped - MONOTONIC_CUMULATIVE_LONG_SUM, - NON_MONOTONIC_CUMULATIVE_LONG_SUM, - DELTA_LONG_SUM, // Deltas are dropped - DOUBLE_GAUGE, - LONG_GAUGE, - SUMMARY, - DELTA_HISTOGRAM, // Deltas are dropped - CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES, - CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE, - DOUBLE_GAUGE_NO_ATTRIBUTES, - DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES)) - .isEqualTo( - "# TYPE target info\n" - + "# HELP target Target metadata\n" - + "target_info{kr=\"vr\"} 1\n" - + "# TYPE otel_scope_info info\n" - + "# HELP otel_scope_info Scope metadata\n" - + "otel_scope_info{otel_scope_name=\"full\",otel_scope_version=\"version\",ks=\"vs\"} 1\n" - + "# TYPE monotonic_cumulative_double_sum_seconds counter\n" - + "# HELP monotonic_cumulative_double_sum_seconds description\n" - + "monotonic_cumulative_double_sum_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcds\"} 5.0 1633950672.000\n" - + "# TYPE monotonic_cumulative_double_sum_suffix_seconds_total counter\n" - + "# HELP monotonic_cumulative_double_sum_suffix_seconds_total description\n" - + "monotonic_cumulative_double_sum_suffix_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcds\"} 5.0 1633950672.000\n" - + "# TYPE non_monotonic_cumulative_double_sum_seconds gauge\n" - + "# HELP non_monotonic_cumulative_double_sum_seconds description\n" - + "non_monotonic_cumulative_double_sum_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"nmcds\"} 5.0 1633950672.000\n" - + "# TYPE monotonic_cumulative_long_sum_seconds counter\n" - + "# HELP monotonic_cumulative_long_sum_seconds unused\n" - + "monotonic_cumulative_long_sum_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcls\"} 5.0 1633950672.000\n" - + "# TYPE non_monotonic_cumulative_long_sum_seconds gauge\n" - + "# HELP non_monotonic_cumulative_long_sum_seconds unused\n" - + "non_monotonic_cumulative_long_sum_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"nmcls\"} 5.0 1633950672.000\n" - + "# TYPE double_gauge_seconds gauge\n" - + "# HELP double_gauge_seconds unused\n" - + "double_gauge_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"dg\"} 5.0 1633950672.000\n" - + "# TYPE long_gauge_seconds gauge\n" - + "# HELP long_gauge_seconds unused\n" - + "long_gauge_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"lg\"} 5.0 1633950672.000\n" - + "# TYPE summary_seconds summary\n" - + "# HELP summary_seconds unused\n" - + "summary_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\"} 5.0 1633950672.000\n" - + 
"summary_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\"} 7.0 1633950672.000\n" - + "summary_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\",quantile=\"0.9\"} 0.1 1633950672.000\n" - + "summary_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\",quantile=\"0.99\"} 0.3 1633950672.000\n" - + "# TYPE cumulative_histogram_no_attributes_seconds histogram\n" - + "# HELP cumulative_histogram_no_attributes_seconds unused\n" - + "cumulative_histogram_no_attributes_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\"} 2.0 1633950672.000\n" - + "cumulative_histogram_no_attributes_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\"} 1.0 1633950672.000\n" - + "cumulative_histogram_no_attributes_seconds_bucket{otel_scope_name=\"full\",otel_scope_version=\"version\",le=\"+Inf\"} 2.0 1633950672.000 # {span_id=\"0000000000000002\",trace_id=\"00000000000000000000000000000001\"} 4.0 0.001\n" - + "# TYPE cumulative_histogram_single_attribute_seconds histogram\n" - + "# HELP cumulative_histogram_single_attribute_seconds unused\n" - + "cumulative_histogram_single_attribute_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\"} 2.0 1633950672.000\n" - + "cumulative_histogram_single_attribute_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\"} 1.0 1633950672.000\n" - + "cumulative_histogram_single_attribute_seconds_bucket{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\",le=\"+Inf\"} 2.0 1633950672.000 # {span_id=\"0000000000000002\",trace_id=\"00000000000000000000000000000001\"} 4.0 0.001\n" - + "# TYPE double_gauge_no_attributes_seconds gauge\n" - + "# HELP double_gauge_no_attributes_seconds unused\n" - + "double_gauge_no_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\"} 7.0 1633950672.000\n" - + "# TYPE double_gauge_multiple_attributes_seconds gauge\n" - + "# HELP double_gauge_multiple_attributes_seconds unused\n" - + "double_gauge_multiple_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",animal=\"bear\",type=\"dgma\"} 8.0 1633950672.000\n" - + "# EOF\n"); - } - - private static String serialize004(MetricData... metrics) { - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - try { - new Serializer.Prometheus004Serializer(unused -> true).write(Arrays.asList(metrics), bos); - return bos.toString("UTF-8"); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - private static String serializeOpenMetrics(MetricData... 
metrics) { - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - try { - new Serializer.OpenMetrics100Serializer(unused -> true).write(Arrays.asList(metrics), bos); - return bos.toString("UTF-8"); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } -} diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/TestConstants.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/TestConstants.java deleted file mode 100644 index 10869229e06..00000000000 --- a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/TestConstants.java +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.prometheus; - -import static io.opentelemetry.api.common.AttributeKey.stringKey; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.TraceFlags; -import io.opentelemetry.api.trace.TraceState; -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.metrics.data.AggregationTemporality; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile; -import io.opentelemetry.sdk.resources.Resource; -import java.util.Arrays; -import java.util.Collections; -import java.util.concurrent.TimeUnit; - -/** A helper class encapsulating immutable static data that can be shared across all the tests. 
*/ -class TestConstants { - - private TestConstants() { - // Private constructor to prevent instantiation - } - - private static final AttributeKey TYPE = stringKey("type"); - - static final MetricData MONOTONIC_CUMULATIVE_DOUBLE_SUM = - ImmutableMetricData.createDoubleSum( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "monotonic.cumulative.double.sum", - "description", - "s", - ImmutableSumData.create( - /* isMonotonic= */ true, - AggregationTemporality.CUMULATIVE, - Collections.singletonList( - ImmutableDoublePointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "mcds"), - 5)))); - - static final MetricData MONOTONIC_CUMULATIVE_DOUBLE_SUM_WITH_SUFFIX_TOTAL = - ImmutableMetricData.createDoubleSum( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "monotonic.cumulative.double.sum.suffix.total", - "description", - "s", - ImmutableSumData.create( - /* isMonotonic= */ true, - AggregationTemporality.CUMULATIVE, - Collections.singletonList( - ImmutableDoublePointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "mcds"), - 5)))); - - static final MetricData NON_MONOTONIC_CUMULATIVE_DOUBLE_SUM = - ImmutableMetricData.createDoubleSum( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "non.monotonic.cumulative.double.sum", - "description", - "s", - ImmutableSumData.create( - /* isMonotonic= */ false, - AggregationTemporality.CUMULATIVE, - Collections.singletonList( - ImmutableDoublePointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "nmcds"), - 5)))); - - static final MetricData DELTA_DOUBLE_SUM = - ImmutableMetricData.createDoubleSum( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "delta.double.sum", - "unused", - "s", - ImmutableSumData.create( - /* isMonotonic= */ true, - AggregationTemporality.DELTA, - Collections.singletonList( - ImmutableDoublePointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "mdds"), - 5)))); - - static final MetricData MONOTONIC_CUMULATIVE_LONG_SUM = - ImmutableMetricData.createLongSum( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "monotonic.cumulative.long.sum", - "unused", - "s", - ImmutableSumData.create( - /* isMonotonic= */ true, - AggregationTemporality.CUMULATIVE, - Collections.singletonList( - ImmutableLongPointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "mcls"), - 5)))); - - static final MetricData NON_MONOTONIC_CUMULATIVE_LONG_SUM = - ImmutableMetricData.createLongSum( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "non.monotonic.cumulative.long_sum", - "unused", - "s", - ImmutableSumData.create( - /* isMonotonic= */ false, - 
AggregationTemporality.CUMULATIVE, - Collections.singletonList( - ImmutableLongPointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "nmcls"), - 5)))); - static final MetricData DELTA_LONG_SUM = - ImmutableMetricData.createLongSum( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "delta.long.sum", - "unused", - "s", - ImmutableSumData.create( - /* isMonotonic= */ true, - AggregationTemporality.DELTA, - Collections.singletonList( - ImmutableLongPointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "mdls"), - 5)))); - - static final MetricData DOUBLE_GAUGE = - ImmutableMetricData.createDoubleGauge( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "double.gauge", - "unused", - "s", - ImmutableGaugeData.create( - Collections.singletonList( - ImmutableDoublePointData.create( - 1633947011000000000L, 1633950672000000000L, Attributes.of(TYPE, "dg"), 5)))); - static final MetricData LONG_GAUGE = - ImmutableMetricData.createLongGauge( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "long.gauge", - "unused", - "s", - ImmutableGaugeData.create( - Collections.singletonList( - ImmutableLongPointData.create( - 1633947011000000000L, 1633950672000000000L, Attributes.of(TYPE, "lg"), 5)))); - static final MetricData SUMMARY = - ImmutableMetricData.createDoubleSummary( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "summary", - "unused", - "s", - ImmutableSummaryData.create( - Collections.singletonList( - ImmutableSummaryPointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "s"), - 5, - 7, - Arrays.asList( - ImmutableValueAtQuantile.create(0.9, 0.1), - ImmutableValueAtQuantile.create(0.99, 0.3)))))); - - static final MetricData DELTA_HISTOGRAM = - ImmutableMetricData.createDoubleHistogram( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "delta.histogram", - "unused", - "s", - ImmutableHistogramData.create( - AggregationTemporality.DELTA, - Collections.singletonList( - ImmutableHistogramPointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.empty(), - 1.0, - /* hasMin= */ false, - 0, - /* hasMax= */ false, - 0, - Collections.emptyList(), - Collections.singletonList(2L), - Collections.emptyList())))); - - static final MetricData CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES = - ImmutableMetricData.createDoubleHistogram( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "cumulative.histogram.no.attributes", - "unused", - "s", - ImmutableHistogramData.create( - AggregationTemporality.CUMULATIVE, - Collections.singletonList( - ImmutableHistogramPointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.empty(), - 1.0, - /* 
hasMin= */ false, - 0, - /* hasMax= */ false, - 0, - Collections.emptyList(), - Collections.singletonList(2L), - Collections.singletonList( - ImmutableDoubleExemplarData.create( - Attributes.empty(), - TimeUnit.MILLISECONDS.toNanos(1L), - SpanContext.create( - "00000000000000000000000000000001", - "0000000000000002", - TraceFlags.getDefault(), - TraceState.getDefault()), - /* value= */ 4)))))); - - static final MetricData CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE = - ImmutableMetricData.createDoubleHistogram( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "cumulative.histogram.single.attribute", - "unused", - "s", - ImmutableHistogramData.create( - AggregationTemporality.CUMULATIVE, - Collections.singletonList( - ImmutableHistogramPointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "hs"), - 1.0, - /* hasMin= */ false, - 0, - /* hasMax= */ false, - 0, - Collections.emptyList(), - Collections.singletonList(2L), - Collections.singletonList( - ImmutableDoubleExemplarData.create( - Attributes.empty(), - TimeUnit.MILLISECONDS.toNanos(1L), - SpanContext.create( - "00000000000000000000000000000001", - "0000000000000002", - TraceFlags.getDefault(), - TraceState.getDefault()), - /* value= */ 4)))))); - - static final MetricData DOUBLE_GAUGE_NO_ATTRIBUTES = - ImmutableMetricData.createDoubleGauge( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "double.gauge.no.attributes", - "unused", - "s", - ImmutableGaugeData.create( - Collections.singletonList( - ImmutableDoublePointData.create( - 1633947011000000000L, 1633950672000000000L, Attributes.empty(), 7)))); - - static final MetricData DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES = - ImmutableMetricData.createDoubleGauge( - Resource.create(Attributes.of(stringKey("kr"), "vr")), - InstrumentationScopeInfo.builder("full") - .setVersion("version") - .setAttributes(Attributes.of(stringKey("ks"), "vs")) - .build(), - "double.gauge.multiple.attributes", - "unused", - "s", - ImmutableGaugeData.create( - Collections.singletonList( - ImmutableDoublePointData.create( - 1633947011000000000L, - 1633950672000000000L, - Attributes.of(TYPE, "dgma", stringKey("animal"), "bear"), - 8)))); -} diff --git a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/internal/PrometheusMetricReaderProviderTest.java b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/internal/PrometheusMetricReaderProviderTest.java index 9cc1acd2614..1c00a604e17 100644 --- a/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/internal/PrometheusMetricReaderProviderTest.java +++ b/exporters/prometheus/src/test/java/io/opentelemetry/exporter/prometheus/internal/PrometheusMetricReaderProviderTest.java @@ -7,14 +7,20 @@ import static org.assertj.core.api.Assertions.as; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.when; import com.sun.net.httpserver.HttpServer; import io.opentelemetry.exporter.prometheus.PrometheusHttpServer; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; import 
io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.export.MetricReader; +import io.prometheus.metrics.exporter.httpserver.HTTPServer; import java.io.IOException; import java.net.ServerSocket; import java.util.HashMap; @@ -49,12 +55,16 @@ void createMetricReader_Default() throws IOException { try (MetricReader metricReader = provider.createMetricReader(configProperties)) { assertThat(metricReader) .isInstanceOf(PrometheusHttpServer.class) + .extracting("httpServer", as(InstanceOfAssertFactories.type(HTTPServer.class))) .extracting("server", as(InstanceOfAssertFactories.type(HttpServer.class))) .satisfies( server -> { assertThat(server.getAddress().getHostName()).isEqualTo("0:0:0:0:0:0:0:0"); assertThat(server.getAddress().getPort()).isEqualTo(9464); }); + assertThat(metricReader.getMemoryMode()).isEqualTo(MemoryMode.REUSABLE_DATA); + assertThat(metricReader.getDefaultAggregation(InstrumentType.HISTOGRAM)) + .isEqualTo(Aggregation.defaultAggregation()); } } @@ -71,6 +81,10 @@ void createMetricReader_WithConfiguration() throws IOException { Map config = new HashMap<>(); config.put("otel.exporter.prometheus.host", "localhost"); config.put("otel.exporter.prometheus.port", String.valueOf(port)); + config.put("otel.java.exporter.memory_mode", "immutable_data"); + config.put( + "otel.java.experimental.exporter.prometheus.metrics.default.histogram.aggregation", + "BASE2_EXPONENTIAL_BUCKET_HISTOGRAM"); when(configProperties.getInt(any())).thenReturn(null); when(configProperties.getString(any())).thenReturn(null); @@ -78,12 +92,29 @@ void createMetricReader_WithConfiguration() throws IOException { try (MetricReader metricReader = provider.createMetricReader(DefaultConfigProperties.createFromMap(config))) { assertThat(metricReader) + .extracting("httpServer", as(InstanceOfAssertFactories.type(HTTPServer.class))) .extracting("server", as(InstanceOfAssertFactories.type(HttpServer.class))) .satisfies( server -> { - assertThat(server.getAddress().getHostName()).isEqualTo("localhost"); + assertThat(server.getAddress().getHostName()) + .isIn("localhost", "127.0.0.1", "kubernetes.docker.internal"); assertThat(server.getAddress().getPort()).isEqualTo(port); }); + assertThat(metricReader.getMemoryMode()).isEqualTo(MemoryMode.IMMUTABLE_DATA); + assertThat(metricReader.getDefaultAggregation(InstrumentType.HISTOGRAM)) + .isEqualTo(Aggregation.base2ExponentialBucketHistogram()); } } + + @Test + void createMetricReader_WithWrongConfiguration() { + Map config = new HashMap<>(); + config.put( + "otel.java.experimental.exporter.prometheus.metrics.default.histogram.aggregation", "foo"); + + assertThatThrownBy( + () -> provider.createMetricReader(DefaultConfigProperties.createFromMap(config))) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining("Unrecognized default histogram aggregation:"); + } } diff --git a/exporters/prometheus/src/test/resources/otel-config.yaml b/exporters/prometheus/src/test/resources/otel-config.yaml index 1db8afa45db..c6cec85cd94 100644 --- a/exporters/prometheus/src/test/resources/otel-config.yaml +++ b/exporters/prometheus/src/test/resources/otel-config.yaml @@ -1,5 +1,6 @@ extensions: - health_check: {} + health_check: + endpoint: 0.0.0.0:13133 receivers: prometheus: config: @@ -7,12 +8,12 @@ receivers: - job_name: 'app' scrape_interval: 1s static_configs: - - 
targets: ['$APP_ENDPOINT'] + - targets: ['${APP_ENDPOINT}'] exporters: - logging: - verbosity: $LOGGING_EXPORTER_VERBOSITY + debug: + verbosity: ${LOGGING_EXPORTER_VERBOSITY} otlp: - endpoint: $OTLP_EXPORTER_ENDPOINT + endpoint: ${OTLP_EXPORTER_ENDPOINT} tls: insecure: true compression: none @@ -21,4 +22,4 @@ service: pipelines: metrics: receivers: [prometheus] - exporters: [logging, otlp] + exporters: [debug, otlp] diff --git a/exporters/sender/grpc-managed-channel/src/main/java/io/opentelemetry/exporter/sender/grpc/managedchannel/internal/UpstreamGrpcSender.java b/exporters/sender/grpc-managed-channel/src/main/java/io/opentelemetry/exporter/sender/grpc/managedchannel/internal/UpstreamGrpcSender.java index 2a18eaf2b53..85a677d1894 100644 --- a/exporters/sender/grpc-managed-channel/src/main/java/io/opentelemetry/exporter/sender/grpc/managedchannel/internal/UpstreamGrpcSender.java +++ b/exporters/sender/grpc-managed-channel/src/main/java/io/opentelemetry/exporter/sender/grpc/managedchannel/internal/UpstreamGrpcSender.java @@ -8,15 +8,25 @@ import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.MoreExecutors; +import io.grpc.ManagedChannel; +import io.grpc.Metadata; import io.grpc.Status; +import io.grpc.StatusException; +import io.grpc.StatusRuntimeException; +import io.grpc.stub.MetadataUtils; import io.opentelemetry.exporter.internal.grpc.GrpcResponse; import io.opentelemetry.exporter.internal.grpc.GrpcSender; import io.opentelemetry.exporter.internal.grpc.MarshalerServiceStub; import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.sdk.common.CompletableResultCode; -import java.util.concurrent.TimeUnit; -import java.util.function.BiConsumer; -import org.checkerframework.checker.nullness.qual.Nullable; +import java.time.Duration; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.function.Consumer; +import java.util.function.Supplier; +import javax.annotation.Nullable; /** * A {@link GrpcSender} which uses the upstream grpc-java library. @@ -27,40 +37,88 @@ public final class UpstreamGrpcSender implements GrpcSender { private final MarshalerServiceStub stub; + private final boolean shutdownChannel; private final long timeoutNanos; + private final Supplier>> headersSupplier; + private final Executor executor; /** Creates a new {@link UpstreamGrpcSender}. */ - public UpstreamGrpcSender(MarshalerServiceStub stub, long timeoutNanos) { - this.timeoutNanos = timeoutNanos; + public UpstreamGrpcSender( + MarshalerServiceStub stub, + boolean shutdownChannel, + long timeoutNanos, + Supplier>> headersSupplier, + @Nullable ExecutorService executorService) { this.stub = stub; + this.shutdownChannel = shutdownChannel; + this.timeoutNanos = timeoutNanos; + this.headersSupplier = headersSupplier; + this.executor = executorService == null ? 
MoreExecutors.directExecutor() : executorService; } @Override - public void send(T request, Runnable onSuccess, BiConsumer onError) { + public void send(T request, Consumer onResponse, Consumer onError) { MarshalerServiceStub stub = this.stub; if (timeoutNanos > 0) { - stub = stub.withDeadlineAfter(timeoutNanos, TimeUnit.NANOSECONDS); + stub = stub.withDeadlineAfter(Duration.ofNanos(timeoutNanos)); } + Map> headers = headersSupplier.get(); + if (headers != null) { + Metadata metadata = new Metadata(); + for (Map.Entry> entry : headers.entrySet()) { + metadata.put( + Metadata.Key.of(entry.getKey(), Metadata.ASCII_STRING_MARSHALLER), + String.join(",", entry.getValue())); + } + stub = stub.withInterceptors(MetadataUtils.newAttachHeadersInterceptor(metadata)); + } + Futures.addCallback( stub.export(request), new FutureCallback() { @Override public void onSuccess(@Nullable Object unused) { - onSuccess.run(); + onResponse.accept( + GrpcResponse.create(Status.OK.getCode().value(), Status.OK.getDescription())); } @Override public void onFailure(Throwable t) { - Status status = Status.fromThrowable(t); - onError.accept( - GrpcResponse.create(status.getCode().value(), status.getDescription()), t); + Status status = fromThrowable(t); + if (status == null) { + onError.accept(t); + } else { + onResponse.accept( + GrpcResponse.create(status.getCode().value(), status.getDescription())); + } } }, - MoreExecutors.directExecutor()); + executor); + } + + /** + * Copy of {@link Status#fromThrowable(Throwable)} which returns null instead of {@link + * Status#UNKNOWN} when no status can be found. + */ + @Nullable + private static Status fromThrowable(Throwable cause) { + while (cause != null) { + if (cause instanceof StatusException) { + return ((StatusException) cause).getStatus(); + } else if (cause instanceof StatusRuntimeException) { + return ((StatusRuntimeException) cause).getStatus(); + } + cause = cause.getCause(); + } + return null; } @Override public CompletableResultCode shutdown() { + if (shutdownChannel) { + ManagedChannel channel = (ManagedChannel) stub.getChannel(); + channel.shutdownNow(); + } return CompletableResultCode.ofSuccess(); } } diff --git a/exporters/sender/grpc-managed-channel/src/main/java/io/opentelemetry/exporter/sender/grpc/managedchannel/internal/UpstreamGrpcSenderProvider.java b/exporters/sender/grpc-managed-channel/src/main/java/io/opentelemetry/exporter/sender/grpc/managedchannel/internal/UpstreamGrpcSenderProvider.java index b560712351f..3b26aeedbfd 100644 --- a/exporters/sender/grpc-managed-channel/src/main/java/io/opentelemetry/exporter/sender/grpc/managedchannel/internal/UpstreamGrpcSenderProvider.java +++ b/exporters/sender/grpc-managed-channel/src/main/java/io/opentelemetry/exporter/sender/grpc/managedchannel/internal/UpstreamGrpcSenderProvider.java @@ -6,22 +6,21 @@ package io.opentelemetry.exporter.sender.grpc.managedchannel.internal; import io.grpc.Channel; -import io.grpc.ClientInterceptors; import io.grpc.Codec; -import io.grpc.Metadata; -import io.grpc.stub.MetadataUtils; +import io.grpc.CompressorRegistry; +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import io.opentelemetry.exporter.internal.compression.Compressor; import io.opentelemetry.exporter.internal.grpc.GrpcSender; +import io.opentelemetry.exporter.internal.grpc.GrpcSenderConfig; import io.opentelemetry.exporter.internal.grpc.GrpcSenderProvider; import io.opentelemetry.exporter.internal.grpc.MarshalerServiceStub; import io.opentelemetry.exporter.internal.marshal.Marshaler; 
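// Illustrative sketch, not part of this change: the headers Supplier<Map<String, List<String>>>
// handed to UpstreamGrpcSender is re-read on every send(), so per-request values such as a
// rotating auth token can be supplied without rebuilding the sender. The token supplier below
// is hypothetical.
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

final class DynamicHeadersSketch {
  private DynamicHeadersSketch() {}

  static Supplier<Map<String, List<String>>> bearerAuthHeaders(Supplier<String> token) {
    return () ->
        Collections.singletonMap(
            "authorization", Collections.singletonList("Bearer " + token.get()));
  }
}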
-import io.opentelemetry.sdk.common.export.RetryPolicy; +import java.io.IOException; +import java.io.OutputStream; import java.net.URI; +import java.util.List; import java.util.Map; -import java.util.function.BiFunction; -import java.util.function.Supplier; -import javax.annotation.Nullable; -import javax.net.ssl.SSLContext; -import javax.net.ssl.X509TrustManager; /** * {@link GrpcSender} SPI implementation for {@link UpstreamGrpcSender}. @@ -32,40 +31,77 @@ public class UpstreamGrpcSenderProvider implements GrpcSenderProvider { @Override - public GrpcSender createSender( - URI endpoint, - String endpointPath, - boolean compressionEnabled, - long timeoutNanos, - Map headers, - @Nullable Object managedChannel, - Supplier>> stubFactory, - @Nullable RetryPolicy retryPolicy, - @Nullable SSLContext sslContext, - @Nullable X509TrustManager trustManager) { - Metadata metadata = new Metadata(); + public GrpcSender createSender(GrpcSenderConfig grpcSenderConfig) { + boolean shutdownChannel = false; + Object managedChannel = grpcSenderConfig.getManagedChannel(); + if (managedChannel == null) { + // Shutdown the channel as part of the exporter shutdown sequence if + shutdownChannel = true; + managedChannel = minimalFallbackManagedChannel(grpcSenderConfig.getEndpoint()); + } + String authorityOverride = null; - for (Map.Entry entry : headers.entrySet()) { - String name = entry.getKey(); - String value = entry.getValue(); - if (name.equals("host")) { - authorityOverride = value; - continue; + Map> headers = grpcSenderConfig.getHeadersSupplier().get(); + if (headers != null) { + for (Map.Entry> entry : headers.entrySet()) { + if (entry.getKey().equals("host") && !entry.getValue().isEmpty()) { + authorityOverride = entry.getValue().get(0); + } } - metadata.put(Metadata.Key.of(name, Metadata.ASCII_STRING_MARSHALLER), value); } - Channel channel = - ClientInterceptors.intercept( - (Channel) managedChannel, MetadataUtils.newAttachHeadersInterceptor(metadata)); + String compression = Codec.Identity.NONE.getMessageEncoding(); + Compressor compressor = grpcSenderConfig.getCompressor(); + if (compressor != null) { + CompressorRegistry.getDefaultInstance() + .register( + new io.grpc.Compressor() { + @Override + public String getMessageEncoding() { + return compressor.getEncoding(); + } + + @Override + public OutputStream compress(OutputStream os) throws IOException { + return compressor.compress(os); + } + }); + compression = compressor.getEncoding(); + } - Codec codec = compressionEnabled ? new Codec.Gzip() : Codec.Identity.NONE; MarshalerServiceStub stub = - stubFactory + grpcSenderConfig + .getStubFactory() .get() - .apply(channel, authorityOverride) - .withCompression(codec.getMessageEncoding()); + .apply((Channel) managedChannel, authorityOverride) + .withCompression(compression); - return new UpstreamGrpcSender<>(stub, timeoutNanos); + return new UpstreamGrpcSender<>( + stub, + shutdownChannel, + grpcSenderConfig.getTimeoutNanos(), + grpcSenderConfig.getHeadersSupplier(), + grpcSenderConfig.getExecutorService()); + } + + /** + * If {@link ManagedChannel} is not explicitly set, provide a minimally configured fallback + * channel to avoid failing initialization. + * + *

This is required to accommodate autoconfigure with {@code + * opentelemetry-exporter-sender-grpc-managed-channel} which will always fail to initialize + * without a fallback channel since there isn't an opportunity to explicitly set the channel. + * + *

This only incorporates the target address, port, and whether to use plain text. All + * additional settings are intentionally ignored and must be configured with an explicitly set + * {@link ManagedChannel}. + */ + private static ManagedChannel minimalFallbackManagedChannel(URI endpoint) { + ManagedChannelBuilder channelBuilder = + ManagedChannelBuilder.forAddress(endpoint.getHost(), endpoint.getPort()); + if (!endpoint.getScheme().equals("https")) { + channelBuilder.usePlaintext(); + } + return channelBuilder.build(); } } diff --git a/exporters/sender/jdk/build.gradle.kts b/exporters/sender/jdk/build.gradle.kts index 2784bc66cb1..13f77b14824 100644 --- a/exporters/sender/jdk/build.gradle.kts +++ b/exporters/sender/jdk/build.gradle.kts @@ -9,6 +9,8 @@ otelJava.moduleName.set("io.opentelemetry.exporter.sender.jdk.internal") dependencies { implementation(project(":exporters:common")) implementation(project(":sdk:common")) + + compileOnly("com.fasterxml.jackson.core:jackson-core") } tasks { @@ -18,3 +20,8 @@ tasks { options.release.set(11) } } + +tasks.test { + val testJavaVersion: String? by project + enabled = !testJavaVersion.equals("8") +} diff --git a/exporters/sender/jdk/src/main/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSender.java b/exporters/sender/jdk/src/main/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSender.java index 5739ffb5596..312149728af 100644 --- a/exporters/sender/jdk/src/main/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSender.java +++ b/exporters/sender/jdk/src/main/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSender.java @@ -5,12 +5,18 @@ package io.opentelemetry.exporter.sender.jdk.internal; +import static java.util.stream.Collectors.joining; + +import io.opentelemetry.exporter.internal.compression.Compressor; import io.opentelemetry.exporter.internal.http.HttpSender; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.io.UncheckedIOException; import java.net.URI; import java.net.URISyntaxException; import java.net.http.HttpClient; @@ -18,8 +24,11 @@ import java.net.http.HttpResponse; import java.nio.ByteBuffer; import java.time.Duration; +import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; +import java.util.StringJoiner; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; @@ -27,10 +36,13 @@ import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; +import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.zip.GZIPOutputStream; +import java.util.logging.Level; +import java.util.logging.Logger; import javax.annotation.Nullable; import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLException; /** * {@link HttpSender} which is backed by JDK {@link HttpClient}. 
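// Illustrative sketch, not part of this change: the fallback channel described above only covers
// target address, port, and plaintext. Anything beyond that (keepalive, message limits, TLS
// details, etc.) needs an explicitly supplied ManagedChannel, roughly along these lines; the
// endpoint and settings below are hypothetical.
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import java.util.concurrent.TimeUnit;

final class ExplicitChannelSketch {
  private ExplicitChannelSketch() {}

  static ManagedChannel customChannel() {
    return ManagedChannelBuilder.forAddress("collector.example.com", 4317)
        .useTransportSecurity()
        .keepAliveTime(30, TimeUnit.SECONDS)
        .maxInboundMessageSize(16 * 1024 * 1024)
        .build();
  }
}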
@@ -47,48 +59,111 @@ public final class JdkHttpSender implements HttpSender { private static final ThreadLocal threadLocalByteBufPool = ThreadLocal.withInitial(ByteBufferPool::new); - private final ExecutorService executorService = Executors.newFixedThreadPool(5); + private static final Logger logger = Logger.getLogger(JdkHttpSender.class.getName()); + + private final boolean managedExecutor; + private final ExecutorService executorService; private final HttpClient client; private final URI uri; - private final boolean compressionEnabled; + @Nullable private final Compressor compressor; + private final boolean exportAsJson; private final String contentType; private final long timeoutNanos; - private final Supplier> headerSupplier; + private final Supplier>> headerSupplier; @Nullable private final RetryPolicy retryPolicy; + private final Predicate retryExceptionPredicate; + // Visible for testing JdkHttpSender( + HttpClient client, String endpoint, - boolean compressionEnabled, + @Nullable Compressor compressor, + boolean exportAsJson, String contentType, long timeoutNanos, - Supplier> headerSupplier, + Supplier>> headerSupplier, @Nullable RetryPolicy retryPolicy, - @Nullable SSLContext sslContext) { - HttpClient.Builder builder = HttpClient.newBuilder().executor(executorService); - if (sslContext != null) { - builder.sslContext(sslContext); - } - this.client = builder.build(); + @Nullable ExecutorService executorService) { + this.client = client; try { this.uri = new URI(endpoint); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); } - this.compressionEnabled = compressionEnabled; + this.compressor = compressor; + this.exportAsJson = exportAsJson; this.contentType = contentType; this.timeoutNanos = timeoutNanos; this.headerSupplier = headerSupplier; this.retryPolicy = retryPolicy; + this.retryExceptionPredicate = + Optional.ofNullable(retryPolicy) + .map(RetryPolicy::getRetryExceptionPredicate) + .orElse(JdkHttpSender::isRetryableException); + if (executorService == null) { + this.executorService = Executors.newFixedThreadPool(5); + this.managedExecutor = true; + } else { + this.executorService = executorService; + this.managedExecutor = false; + } + } + + JdkHttpSender( + String endpoint, + @Nullable Compressor compressor, + boolean exportAsJson, + String contentType, + long timeoutNanos, + long connectTimeoutNanos, + Supplier>> headerSupplier, + @Nullable RetryPolicy retryPolicy, + @Nullable ProxyOptions proxyOptions, + @Nullable SSLContext sslContext, + @Nullable ExecutorService executorService) { + this( + configureClient(sslContext, connectTimeoutNanos, proxyOptions), + endpoint, + compressor, + exportAsJson, + contentType, + timeoutNanos, + headerSupplier, + retryPolicy, + executorService); + } + + private static HttpClient configureClient( + @Nullable SSLContext sslContext, + long connectionTimeoutNanos, + @Nullable ProxyOptions proxyOptions) { + HttpClient.Builder builder = + HttpClient.newBuilder().connectTimeout(Duration.ofNanos(connectionTimeoutNanos)); + if (sslContext != null) { + builder.sslContext(sslContext); + } + if (proxyOptions != null) { + builder.proxy(proxyOptions.getProxySelector()); + } + return builder.build(); } @Override public void send( - Consumer marshaler, + Marshaler marshaler, int contentLength, Consumer onResponse, Consumer onError) { CompletableFuture> unused = - CompletableFuture.supplyAsync(() -> sendInternal(marshaler), executorService) + CompletableFuture.supplyAsync( + () -> { + try { + return sendInternal(marshaler); + } catch 
(IOException e) { + throw new UncheckedIOException(e); + } + }, + executorService) .whenComplete( (httpResponse, throwable) -> { if (throwable != null) { @@ -99,24 +174,28 @@ public void send( }); } - private HttpResponse sendInternal(Consumer marshaler) { + // Visible for testing + HttpResponse sendInternal(Marshaler marshaler) throws IOException { long startTimeNanos = System.nanoTime(); HttpRequest.Builder requestBuilder = HttpRequest.newBuilder().uri(uri).timeout(Duration.ofNanos(timeoutNanos)); - headerSupplier.get().forEach(requestBuilder::setHeader); + Map> headers = headerSupplier.get(); + if (headers != null) { + headers.forEach((key, values) -> values.forEach(value -> requestBuilder.header(key, value))); + } requestBuilder.header("Content-Type", contentType); NoCopyByteArrayOutputStream os = threadLocalBaos.get(); os.reset(); - if (compressionEnabled) { - requestBuilder.header("Content-Encoding", "gzip"); - try (GZIPOutputStream gzos = new GZIPOutputStream(os)) { - marshaler.accept(gzos); + if (compressor != null) { + requestBuilder.header("Content-Encoding", compressor.getEncoding()); + try (OutputStream compressed = compressor.compress(os)) { + write(marshaler, compressed); } catch (IOException e) { throw new IllegalStateException(e); } } else { - marshaler.accept(os); + write(marshaler, os); } ByteBufferPool byteBufferPool = threadLocalByteBufPool.get(); @@ -129,46 +208,112 @@ private HttpResponse sendInternal(Consumer marshaler) { long attempt = 0; long nextBackoffNanos = retryPolicy.getInitialBackoff().toNanos(); + HttpResponse httpResponse = null; + IOException exception = null; do { - requestBuilder.timeout(Duration.ofNanos(timeoutNanos - (System.nanoTime() - startTimeNanos))); - HttpResponse httpResponse = sendRequest(requestBuilder, byteBufferPool); - attempt++; - if (attempt >= retryPolicy.getMaxAttempts() - || !retryableStatusCodes.contains(httpResponse.statusCode())) { - return httpResponse; + if (attempt > 0) { + // Compute and sleep for backoff + long currentBackoffNanos = + Math.min(nextBackoffNanos, retryPolicy.getMaxBackoff().toNanos()); + long backoffNanos = + (long) (ThreadLocalRandom.current().nextDouble(0.8d, 1.2d) * currentBackoffNanos); + nextBackoffNanos = (long) (currentBackoffNanos * retryPolicy.getBackoffMultiplier()); + try { + TimeUnit.NANOSECONDS.sleep(backoffNanos); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + break; // Break out and return response or throw + } + // If after sleeping we've exceeded timeoutNanos, break out and return response or throw + if ((System.nanoTime() - startTimeNanos) >= timeoutNanos) { + break; + } } - - // Compute and sleep for backoff - long upperBoundNanos = Math.min(nextBackoffNanos, retryPolicy.getMaxBackoff().toNanos()); - long backoffNanos = ThreadLocalRandom.current().nextLong(upperBoundNanos); - nextBackoffNanos = (long) (nextBackoffNanos * retryPolicy.getBackoffMultiplier()); + httpResponse = null; + exception = null; + requestBuilder.timeout(Duration.ofNanos(timeoutNanos - (System.nanoTime() - startTimeNanos))); try { - TimeUnit.NANOSECONDS.sleep(backoffNanos); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new IllegalStateException(e); - } - if ((System.nanoTime() - startTimeNanos) >= timeoutNanos) { - return httpResponse; + httpResponse = sendRequest(requestBuilder, byteBufferPool); + boolean retryable = retryableStatusCodes.contains(httpResponse.statusCode()); + if (logger.isLoggable(Level.FINER)) { + logger.log( + Level.FINER, + "Attempt " + 
+ attempt + + " returned " + + (retryable ? "retryable" : "non-retryable") + + " response: " + + responseStringRepresentation(httpResponse)); + } + if (!retryable) { + return httpResponse; + } + } catch (IOException e) { + exception = e; + boolean retryable = retryExceptionPredicate.test(exception); + if (logger.isLoggable(Level.FINER)) { + logger.log( + Level.FINER, + "Attempt " + + attempt + + " failed with " + + (retryable ? "retryable" : "non-retryable") + + " exception", + exception); + } + if (!retryable) { + throw exception; + } } - } while (true); + } while (++attempt < retryPolicy.getMaxAttempts()); + + if (httpResponse != null) { + return httpResponse; + } + throw exception; + } + + private static String responseStringRepresentation(HttpResponse response) { + StringJoiner joiner = new StringJoiner(",", "HttpResponse{", "}"); + joiner.add("code=" + response.statusCode()); + joiner.add( + "headers=" + + response.headers().map().entrySet().stream() + .map(entry -> entry.getKey() + "=" + String.join(",", entry.getValue())) + .collect(joining(",", "[", "]"))); + return joiner.toString(); + } + + private void write(Marshaler marshaler, OutputStream os) throws IOException { + if (exportAsJson) { + marshaler.writeJsonTo(os); + } else { + marshaler.writeBinaryTo(os); + } } private HttpResponse sendRequest( - HttpRequest.Builder requestBuilder, ByteBufferPool byteBufferPool) { + HttpRequest.Builder requestBuilder, ByteBufferPool byteBufferPool) throws IOException { try { return client.send(requestBuilder.build(), HttpResponse.BodyHandlers.ofByteArray()); - } catch (IOException | InterruptedException e) { - if (e instanceof InterruptedException) { - Thread.currentThread().interrupt(); - } - // TODO: is throwable retryable? + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); throw new IllegalStateException(e); } finally { byteBufferPool.resetPool(); } } + private static boolean isRetryableException(IOException throwable) { + // Almost all IOExceptions we've encountered are transient retryable, so we opt out of specific + // IOExceptions that are unlikely to resolve rather than opting in. 
+ // Known retryable IOException messages: "Connection reset", "/{remote ip}:{remote port} GOAWAY + // received" + // Known retryable HttpTimeoutException messages: "request timed out" + // Known retryable HttpConnectTimeoutException messages: "HTTP connect timed out" + return !(throwable instanceof SSLException); + } + private static class NoCopyByteArrayOutputStream extends ByteArrayOutputStream { NoCopyByteArrayOutputStream() { super(retryableStatusCodes.size()); @@ -226,7 +371,9 @@ private void resetPool() { @Override public CompletableResultCode shutdown() { - executorService.shutdown(); + if (managedExecutor) { + executorService.shutdown(); + } return CompletableResultCode.ofSuccess(); } } diff --git a/exporters/sender/jdk/src/main/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSenderProvider.java b/exporters/sender/jdk/src/main/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSenderProvider.java index dbe93eef3a3..35b7f819aa2 100644 --- a/exporters/sender/jdk/src/main/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSenderProvider.java +++ b/exporters/sender/jdk/src/main/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSenderProvider.java @@ -5,15 +5,9 @@ package io.opentelemetry.exporter.sender.jdk.internal; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.internal.http.HttpSender; +import io.opentelemetry.exporter.internal.http.HttpSenderConfig; import io.opentelemetry.exporter.internal.http.HttpSenderProvider; -import io.opentelemetry.sdk.common.export.RetryPolicy; -import java.util.Map; -import java.util.function.Supplier; -import javax.annotation.Nullable; -import javax.net.ssl.SSLContext; -import javax.net.ssl.X509TrustManager; /** * {@link HttpSender} SPI implementation for {@link JdkHttpSender}. 
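// Illustrative sketch, not part of this change: how the retry settings consumed by sendInternal
// above map to behavior. With the values below the pre-jitter waits are roughly 1s, 1.5s and
// 2.25s (capped at 5s), each scaled by a random factor in [0.8, 1.2). The predicate mirrors
// isRetryableException; the builder method name for it is assumed from the getter used above.
import io.opentelemetry.sdk.common.export.RetryPolicy;
import java.time.Duration;
import javax.net.ssl.SSLException;

final class RetryPolicySketch {
  private RetryPolicySketch() {}

  static RetryPolicy example() {
    return RetryPolicy.builder()
        .setMaxAttempts(4)
        .setInitialBackoff(Duration.ofSeconds(1))
        .setMaxBackoff(Duration.ofSeconds(5))
        .setBackoffMultiplier(1.5)
        .setRetryExceptionPredicate(e -> !(e instanceof SSLException))
        .build();
  }
}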
@@ -24,23 +18,18 @@ public final class JdkHttpSenderProvider implements HttpSenderProvider { @Override - public HttpSender createSender( - String endpoint, - boolean compressionEnabled, - String contentType, - long timeoutNanos, - Supplier> headerSupplier, - @Nullable Authenticator authenticator, - @Nullable RetryPolicy retryPolicy, - @Nullable SSLContext sslContext, - @Nullable X509TrustManager trustManager) { + public HttpSender createSender(HttpSenderConfig httpSenderConfig) { return new JdkHttpSender( - endpoint, - compressionEnabled, - contentType, - timeoutNanos, - headerSupplier, - retryPolicy, - sslContext); + httpSenderConfig.getEndpoint(), + httpSenderConfig.getCompressor(), + httpSenderConfig.getExportAsJson(), + httpSenderConfig.getContentType(), + httpSenderConfig.getTimeoutNanos(), + httpSenderConfig.getConnectTimeoutNanos(), + httpSenderConfig.getHeadersSupplier(), + httpSenderConfig.getRetryPolicy(), + httpSenderConfig.getProxyOptions(), + httpSenderConfig.getSslContext(), + httpSenderConfig.getExecutorService()); } } diff --git a/exporters/sender/jdk/src/test/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSenderTest.java b/exporters/sender/jdk/src/test/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSenderTest.java new file mode 100644 index 00000000000..c5c26f69bec --- /dev/null +++ b/exporters/sender/jdk/src/test/java/io/opentelemetry/exporter/sender/jdk/internal/JdkHttpSenderTest.java @@ -0,0 +1,168 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.sender.jdk.internal; + +import static org.assertj.core.api.Assertions.as; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import io.opentelemetry.sdk.common.export.RetryPolicy; +import java.io.IOException; +import java.net.ConnectException; +import java.net.ServerSocket; +import java.net.http.HttpClient; +import java.net.http.HttpConnectTimeoutException; +import java.time.Duration; +import java.util.Collections; +import java.util.concurrent.TimeUnit; +import javax.net.ssl.SSLException; +import org.assertj.core.api.InstanceOfAssertFactories; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class JdkHttpSenderTest { + + private final HttpClient realHttpClient = + HttpClient.newBuilder().connectTimeout(Duration.ofMillis(10)).build(); + @Mock private HttpClient mockHttpClient; + private JdkHttpSender sender; + + @BeforeEach + void setup() throws IOException, InterruptedException { + // Can't directly spy on HttpClient for some reason, so create a real instance and a mock that + // delegates to the real thing + when(mockHttpClient.send(any(), any())) + .thenAnswer( + invocation -> + realHttpClient.send(invocation.getArgument(0), 
invocation.getArgument(1))); + sender = + new JdkHttpSender( + mockHttpClient, + // Connecting to a non-routable IP address to trigger connection timeout + "http://10.255.255.1", + null, + false, + "text/plain", + Duration.ofSeconds(10).toNanos(), + Collections::emptyMap, + RetryPolicy.builder().setMaxAttempts(2).setInitialBackoff(Duration.ofMillis(1)).build(), + null); + } + + @Test + void sendInternal_RetryableConnectTimeoutException() throws IOException, InterruptedException { + assertThatThrownBy(() -> sender.sendInternal(new NoOpMarshaler())) + .isInstanceOf(HttpConnectTimeoutException.class); + + verify(mockHttpClient, times(2)).send(any(), any()); + } + + @Test + void sendInternal_RetryableConnectException() throws IOException, InterruptedException { + sender = + new JdkHttpSender( + mockHttpClient, + // Connecting to localhost on an unused port address to trigger + // java.net.ConnectException (or java.net.http.HttpConnectTimeoutException on linux java + // 11+) + "http://localhost:" + freePort(), + null, + false, + "text/plain", + Duration.ofSeconds(10).toNanos(), + Collections::emptyMap, + RetryPolicy.builder().setMaxAttempts(2).setInitialBackoff(Duration.ofMillis(1)).build(), + null); + + assertThatThrownBy(() -> sender.sendInternal(new NoOpMarshaler())) + .satisfies( + e -> + assertThat( + (e instanceof ConnectException) + || (e instanceof HttpConnectTimeoutException)) + .isTrue()); + + verify(mockHttpClient, times(2)).send(any(), any()); + } + + private static int freePort() { + try (ServerSocket socket = new ServerSocket(0)) { + return socket.getLocalPort(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Test + void sendInternal_RetryableIoException() throws IOException, InterruptedException { + doThrow(new IOException("error!")).when(mockHttpClient).send(any(), any()); + + assertThatThrownBy(() -> sender.sendInternal(new NoOpMarshaler())) + .isInstanceOf(IOException.class) + .hasMessage("error!"); + + verify(mockHttpClient, times(2)).send(any(), any()); + } + + @Test + void sendInternal_NonRetryableException() throws IOException, InterruptedException { + doThrow(new SSLException("unknown error")).when(mockHttpClient).send(any(), any()); + + assertThatThrownBy(() -> sender.sendInternal(new NoOpMarshaler())) + .isInstanceOf(IOException.class) + .hasMessage("unknown error"); + + verify(mockHttpClient, times(1)).send(any(), any()); + } + + @Test + void connectTimeout() { + sender = + new JdkHttpSender( + "http://localhost", + null, + false, + "text/plain", + 1, + TimeUnit.SECONDS.toNanos(10), + Collections::emptyMap, + null, + null, + null, + null); + + assertThat(sender) + .extracting("client", as(InstanceOfAssertFactories.type(HttpClient.class))) + .satisfies( + httpClient -> + assertThat(httpClient.connectTimeout().get()).isEqualTo(Duration.ofSeconds(10))); + } + + private static class NoOpMarshaler extends Marshaler { + + @Override + public int getBinarySerializedSize() { + return 0; + } + + @Override + protected void writeTo(Serializer output) {} + } +} diff --git a/exporters/sender/okhttp/build.gradle.kts b/exporters/sender/okhttp/build.gradle.kts index ef2c6c59c5f..107270e9ada 100644 --- a/exporters/sender/okhttp/build.gradle.kts +++ b/exporters/sender/okhttp/build.gradle.kts @@ -15,6 +15,7 @@ dependencies { implementation("com.squareup.okhttp3:okhttp") compileOnly("io.grpc:grpc-stub") + compileOnly("com.fasterxml.jackson.core:jackson-core") testImplementation("com.linecorp.armeria:armeria-junit5") } diff --git 
a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/GrpcRequestBody.java b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/GrpcRequestBody.java index ca9191360db..7baa5c4dce0 100644 --- a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/GrpcRequestBody.java +++ b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/GrpcRequestBody.java @@ -5,6 +5,7 @@ package io.opentelemetry.exporter.sender.okhttp.internal; +import io.opentelemetry.exporter.internal.compression.Compressor; import io.opentelemetry.exporter.internal.marshal.Marshaler; import java.io.IOException; import javax.annotation.Nullable; @@ -12,7 +13,6 @@ import okhttp3.RequestBody; import okio.Buffer; import okio.BufferedSink; -import okio.GzipSink; import okio.Okio; /** @@ -33,15 +33,15 @@ public final class GrpcRequestBody extends RequestBody { private final Marshaler marshaler; private final int messageSize; private final int contentLength; - private final boolean compressed; + @Nullable private final Compressor compressor; /** Creates a new {@link GrpcRequestBody}. */ - public GrpcRequestBody(Marshaler marshaler, boolean compressed) { + public GrpcRequestBody(Marshaler marshaler, @Nullable Compressor compressor) { this.marshaler = marshaler; - this.compressed = compressed; + this.compressor = compressor; messageSize = marshaler.getBinarySerializedSize(); - if (compressed) { + if (compressor != null) { // Content length not known since we want to compress on the I/O thread. contentLength = -1; } else { @@ -62,14 +62,15 @@ public long contentLength() { @Override public void writeTo(BufferedSink sink) throws IOException { - if (!compressed) { + if (compressor == null) { sink.writeByte(UNCOMPRESSED_FLAG); sink.writeInt(messageSize); marshaler.writeBinaryTo(sink.outputStream()); } else { try (Buffer compressedBody = new Buffer()) { - try (BufferedSink gzipSink = Okio.buffer(new GzipSink(compressedBody))) { - marshaler.writeBinaryTo(gzipSink.outputStream()); + try (BufferedSink compressedSink = + Okio.buffer(Okio.sink(compressor.compress(compressedBody.outputStream())))) { + marshaler.writeBinaryTo(compressedSink.outputStream()); } sink.writeByte(COMPRESSED_FLAG); int compressedBytes = (int) compressedBody.size(); diff --git a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSender.java b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSender.java index 5da750edead..673e4271cb0 100644 --- a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSender.java +++ b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSender.java @@ -23,7 +23,9 @@ package io.opentelemetry.exporter.sender.okhttp.internal; +import io.opentelemetry.api.internal.InstrumentationUtil; import io.opentelemetry.exporter.internal.RetryUtil; +import io.opentelemetry.exporter.internal.compression.Compressor; import io.opentelemetry.exporter.internal.grpc.GrpcExporterUtil; import io.opentelemetry.exporter.internal.grpc.GrpcResponse; import io.opentelemetry.exporter.internal.grpc.GrpcSender; @@ -36,14 +38,18 @@ import java.time.Duration; import java.util.Arrays; import java.util.Collections; +import java.util.List; import java.util.Map; -import java.util.function.BiConsumer; +import java.util.concurrent.ExecutorService; +import 
java.util.function.Consumer; +import java.util.function.Supplier; import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; import okhttp3.Call; import okhttp3.Callback; -import okhttp3.Headers; +import okhttp3.ConnectionSpec; +import okhttp3.Dispatcher; import okhttp3.HttpUrl; import okhttp3.OkHttpClient; import okhttp3.Protocol; @@ -62,101 +68,114 @@ public final class OkHttpGrpcSender implements GrpcSender>> headersSupplier; + @Nullable private final Compressor compressor; /** Creates a new {@link OkHttpGrpcSender}. */ public OkHttpGrpcSender( String endpoint, - boolean compressionEnabled, + @Nullable Compressor compressor, long timeoutNanos, - Map headers, + long connectTimeoutNanos, + Supplier>> headersSupplier, @Nullable RetryPolicy retryPolicy, @Nullable SSLContext sslContext, - @Nullable X509TrustManager trustManager) { + @Nullable X509TrustManager trustManager, + @Nullable ExecutorService executorService) { + int callTimeoutMillis = + (int) Math.min(Duration.ofNanos(timeoutNanos).toMillis(), Integer.MAX_VALUE); + int connectTimeoutMillis = + (int) Math.min(Duration.ofNanos(connectTimeoutNanos).toMillis(), Integer.MAX_VALUE); + + Dispatcher dispatcher; + if (executorService == null) { + dispatcher = OkHttpUtil.newDispatcher(); + this.managedExecutor = true; + } else { + dispatcher = new Dispatcher(executorService); + this.managedExecutor = false; + } + OkHttpClient.Builder clientBuilder = new OkHttpClient.Builder() - .dispatcher(OkHttpUtil.newDispatcher()) - .callTimeout(Duration.ofNanos(timeoutNanos)); + .dispatcher(dispatcher) + .callTimeout(Duration.ofMillis(callTimeoutMillis)) + .connectTimeout(Duration.ofMillis(connectTimeoutMillis)); if (retryPolicy != null) { clientBuilder.addInterceptor( new RetryInterceptor(retryPolicy, OkHttpGrpcSender::isRetryable)); } - if (sslContext != null && trustManager != null) { - clientBuilder.sslSocketFactory(sslContext.getSocketFactory(), trustManager); - } - if (endpoint.startsWith("http://")) { + + boolean isPlainHttp = endpoint.startsWith("http://"); + if (isPlainHttp) { + clientBuilder.connectionSpecs(Collections.singletonList(ConnectionSpec.CLEARTEXT)); clientBuilder.protocols(Collections.singletonList(Protocol.H2_PRIOR_KNOWLEDGE)); } else { clientBuilder.protocols(Arrays.asList(Protocol.HTTP_2, Protocol.HTTP_1_1)); + if (sslContext != null && trustManager != null) { + clientBuilder.sslSocketFactory(sslContext.getSocketFactory(), trustManager); + } } - this.client = clientBuilder.build(); - Headers.Builder headersBuilder = new Headers.Builder(); - headers.forEach(headersBuilder::add); - headersBuilder.add("te", "trailers"); - if (compressionEnabled) { - headersBuilder.add("grpc-encoding", "gzip"); - } - this.headers = headersBuilder.build(); + this.client = clientBuilder.build(); + this.headersSupplier = headersSupplier; this.url = HttpUrl.get(endpoint); - this.compressionEnabled = compressionEnabled; + this.compressor = compressor; } @Override - public void send(T request, Runnable onSuccess, BiConsumer onError) { - Request.Builder requestBuilder = new Request.Builder().url(url).headers(headers); + public void send(T request, Consumer onResponse, Consumer onError) { + Request.Builder requestBuilder = new Request.Builder().url(url); - RequestBody requestBody = new GrpcRequestBody(request, compressionEnabled); + Map> headers = headersSupplier.get(); + if (headers != null) { + headers.forEach( + (key, values) -> values.forEach(value -> requestBuilder.addHeader(key, value))); + } + 
requestBuilder.addHeader("te", "trailers"); + if (compressor != null) { + requestBuilder.addHeader("grpc-encoding", compressor.getEncoding()); + } + RequestBody requestBody = new GrpcRequestBody(request, compressor); requestBuilder.post(requestBody); - client - .newCall(requestBuilder.build()) - .enqueue( - new Callback() { - @Override - public void onFailure(Call call, IOException e) { - String description = e.getMessage(); - if (description == null) { - description = ""; - } - onError.accept(GrpcResponse.create(2 /* UNKNOWN */, description), e); - } - - @Override - public void onResponse(Call call, Response response) { - // Response body is empty but must be consumed to access trailers. - try { - response.body().bytes(); - } catch (IOException e) { - onError.accept( - GrpcResponse.create( - GrpcExporterUtil.GRPC_STATUS_UNKNOWN, - "Could not consume server response."), - e); - return; - } - - String status = grpcStatus(response); - if ("0".equals(status)) { - onSuccess.run(); - return; - } - - String errorMessage = grpcMessage(response); - int statusCode; - try { - statusCode = Integer.parseInt(status); - } catch (NumberFormatException ex) { - statusCode = GrpcExporterUtil.GRPC_STATUS_UNKNOWN; - } - onError.accept( - GrpcResponse.create(statusCode, errorMessage), - new IllegalStateException(errorMessage)); - } - }); + InstrumentationUtil.suppressInstrumentation( + () -> + client + .newCall(requestBuilder.build()) + .enqueue( + new Callback() { + @Override + public void onFailure(Call call, IOException e) { + onError.accept(e); + } + + @Override + public void onResponse(Call call, Response response) { + // Response body is empty but must be consumed to access trailers. + try { + response.body().bytes(); + } catch (IOException e) { + onError.accept( + new RuntimeException("Could not consume server response", e)); + return; + } + + String status = grpcStatus(response); + + String description = grpcMessage(response); + int statusCode; + try { + statusCode = Integer.parseInt(status); + } catch (NumberFormatException ex) { + statusCode = GrpcExporterUtil.GRPC_STATUS_UNKNOWN; + } + onResponse.accept(GrpcResponse.create(statusCode, description)); + } + })); } @Nullable @@ -193,21 +212,21 @@ private static String grpcMessage(Response response) { @Override public CompletableResultCode shutdown() { client.dispatcher().cancelAll(); - client.dispatcher().executorService().shutdownNow(); + if (managedExecutor) { + client.dispatcher().executorService().shutdownNow(); + } client.connectionPool().evictAll(); return CompletableResultCode.ofSuccess(); } /** Whether response is retriable or not. */ public static boolean isRetryable(Response response) { - // Only retry on gRPC codes which will always come with an HTTP success - if (!response.isSuccessful()) { - return false; - } - // We don't check trailers for retry since retryable error codes always come with response // headers, not trailers, in practice. 
String grpcStatus = response.header(GRPC_STATUS); + if (grpcStatus == null) { + return false; + } return RetryUtil.retryableGrpcStatusCodes().contains(grpcStatus); } diff --git a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSenderProvider.java b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSenderProvider.java index c1cbb3be664..4d7f0136919 100644 --- a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSenderProvider.java +++ b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSenderProvider.java @@ -5,19 +5,10 @@ package io.opentelemetry.exporter.sender.okhttp.internal; -import io.grpc.Channel; import io.opentelemetry.exporter.internal.grpc.GrpcSender; +import io.opentelemetry.exporter.internal.grpc.GrpcSenderConfig; import io.opentelemetry.exporter.internal.grpc.GrpcSenderProvider; -import io.opentelemetry.exporter.internal.grpc.MarshalerServiceStub; import io.opentelemetry.exporter.internal.marshal.Marshaler; -import io.opentelemetry.sdk.common.export.RetryPolicy; -import java.net.URI; -import java.util.Map; -import java.util.function.BiFunction; -import java.util.function.Supplier; -import javax.net.ssl.SSLContext; -import javax.net.ssl.X509TrustManager; -import org.jetbrains.annotations.Nullable; /** * {@link GrpcSender} SPI implementation for {@link OkHttpGrpcSender}. @@ -28,24 +19,16 @@ public class OkHttpGrpcSenderProvider implements GrpcSenderProvider { @Override - public GrpcSender createSender( - URI endpoint, - String endpointPath, - boolean compressionEnabled, - long timeoutNanos, - Map headers, - @Nullable Object managedChannel, - Supplier>> stubFactory, - @Nullable RetryPolicy retryPolicy, - @Nullable SSLContext sslContext, - @Nullable X509TrustManager trustManager) { + public GrpcSender createSender(GrpcSenderConfig grpcSenderConfig) { return new OkHttpGrpcSender<>( - endpoint.resolve(endpointPath).toString(), - compressionEnabled, - timeoutNanos, - headers, - retryPolicy, - sslContext, - trustManager); + grpcSenderConfig.getEndpoint().resolve(grpcSenderConfig.getEndpointPath()).toString(), + grpcSenderConfig.getCompressor(), + grpcSenderConfig.getTimeoutNanos(), + grpcSenderConfig.getConnectTimeoutNanos(), + grpcSenderConfig.getHeadersSupplier(), + grpcSenderConfig.getRetryPolicy(), + grpcSenderConfig.getSslContext(), + grpcSenderConfig.getTrustManager(), + grpcSenderConfig.getExecutorService()); } } diff --git a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSender.java b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSender.java index 306c7b97163..7b5e2081ec4 100644 --- a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSender.java +++ b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSender.java @@ -5,15 +5,20 @@ package io.opentelemetry.exporter.sender.okhttp.internal; +import io.opentelemetry.api.internal.InstrumentationUtil; import io.opentelemetry.exporter.internal.RetryUtil; -import io.opentelemetry.exporter.internal.auth.Authenticator; +import io.opentelemetry.exporter.internal.compression.Compressor; import io.opentelemetry.exporter.internal.http.HttpSender; +import io.opentelemetry.exporter.internal.marshal.Marshaler; import io.opentelemetry.sdk.common.CompletableResultCode; 
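// Illustrative sketch, not part of this change: the Compressor hook consumed by these senders,
// with its shape inferred from the usage above (getEncoding() plus compress(OutputStream)).
// A deflate-based implementation might look roughly like this.
import io.opentelemetry.exporter.internal.compression.Compressor;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.DeflaterOutputStream;

final class DeflateCompressorSketch implements Compressor {
  @Override
  public String getEncoding() {
    return "deflate";
  }

  @Override
  public OutputStream compress(OutputStream out) throws IOException {
    return new DeflaterOutputStream(out);
  }
}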
+import io.opentelemetry.sdk.common.export.ProxyOptions; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.io.IOException; -import java.io.OutputStream; import java.time.Duration; +import java.util.Collections; +import java.util.List; import java.util.Map; +import java.util.concurrent.ExecutorService; import java.util.function.Consumer; import java.util.function.Supplier; import javax.annotation.Nullable; @@ -21,6 +26,8 @@ import javax.net.ssl.X509TrustManager; import okhttp3.Call; import okhttp3.Callback; +import okhttp3.ConnectionSpec; +import okhttp3.Dispatcher; import okhttp3.HttpUrl; import okhttp3.MediaType; import okhttp3.OkHttpClient; @@ -28,7 +35,6 @@ import okhttp3.RequestBody; import okhttp3.ResponseBody; import okio.BufferedSink; -import okio.GzipSink; import okio.Okio; /** @@ -39,106 +45,140 @@ */ public final class OkHttpHttpSender implements HttpSender { + private final boolean managedExecutor; private final OkHttpClient client; private final HttpUrl url; - private final boolean compressionEnabled; - private final Supplier> headerSupplier; + @Nullable private final Compressor compressor; + private final boolean exportAsJson; + private final Supplier>> headerSupplier; private final MediaType mediaType; /** Create a sender. */ + @SuppressWarnings("TooManyParameters") public OkHttpHttpSender( String endpoint, - boolean compressionEnabled, + @Nullable Compressor compressor, + boolean exportAsJson, String contentType, long timeoutNanos, - Supplier> headerSupplier, - @Nullable Authenticator authenticator, + long connectionTimeoutNanos, + Supplier>> headerSupplier, + @Nullable ProxyOptions proxyOptions, @Nullable RetryPolicy retryPolicy, @Nullable SSLContext sslContext, - @Nullable X509TrustManager trustManager) { + @Nullable X509TrustManager trustManager, + @Nullable ExecutorService executorService) { + int callTimeoutMillis = + (int) Math.min(Duration.ofNanos(timeoutNanos).toMillis(), Integer.MAX_VALUE); + int connectTimeoutMillis = + (int) Math.min(Duration.ofNanos(connectionTimeoutNanos).toMillis(), Integer.MAX_VALUE); + + Dispatcher dispatcher; + if (executorService == null) { + dispatcher = OkHttpUtil.newDispatcher(); + this.managedExecutor = true; + } else { + dispatcher = new Dispatcher(executorService); + this.managedExecutor = false; + } + OkHttpClient.Builder builder = new OkHttpClient.Builder() - .dispatcher(OkHttpUtil.newDispatcher()) - .callTimeout(Duration.ofNanos(timeoutNanos)); - - if (authenticator != null) { - Authenticator finalAuthenticator = authenticator; - // Generate and attach OkHttp Authenticator implementation - builder.authenticator( - (route, response) -> { - Request.Builder requestBuilder = response.request().newBuilder(); - finalAuthenticator.getHeaders().forEach(requestBuilder::header); - return requestBuilder.build(); - }); + .dispatcher(dispatcher) + .connectTimeout(Duration.ofMillis(connectTimeoutMillis)) + .callTimeout(Duration.ofMillis(callTimeoutMillis)); + + if (proxyOptions != null) { + builder.proxySelector(proxyOptions.getProxySelector()); } if (retryPolicy != null) { builder.addInterceptor(new RetryInterceptor(retryPolicy, OkHttpHttpSender::isRetryable)); } - if (sslContext != null && trustManager != null) { + + boolean isPlainHttp = endpoint.startsWith("http://"); + if (isPlainHttp) { + builder.connectionSpecs(Collections.singletonList(ConnectionSpec.CLEARTEXT)); + } else if (sslContext != null && trustManager != null) { builder.sslSocketFactory(sslContext.getSocketFactory(), trustManager); } + this.client = 
builder.build(); this.url = HttpUrl.get(endpoint); - this.compressionEnabled = compressionEnabled; + this.compressor = compressor; + this.exportAsJson = exportAsJson; this.mediaType = MediaType.parse(contentType); this.headerSupplier = headerSupplier; } @Override public void send( - Consumer marshaler, + Marshaler marshaler, int contentLength, Consumer onResponse, Consumer onError) { Request.Builder requestBuilder = new Request.Builder().url(url); - headerSupplier.get().forEach(requestBuilder::addHeader); - RequestBody body = new RawRequestBody(marshaler, contentLength, mediaType); - if (compressionEnabled) { - requestBuilder.addHeader("Content-Encoding", "gzip"); - requestBuilder.post(new GzipRequestBody(body)); + + Map> headers = headerSupplier.get(); + if (headers != null) { + headers.forEach( + (key, values) -> values.forEach(value -> requestBuilder.addHeader(key, value))); + } + RequestBody body = new RawRequestBody(marshaler, exportAsJson, contentLength, mediaType); + if (compressor != null) { + requestBuilder.addHeader("Content-Encoding", compressor.getEncoding()); + requestBuilder.post(new CompressedRequestBody(compressor, body)); } else { requestBuilder.post(body); } - client - .newCall(requestBuilder.build()) - .enqueue( - new Callback() { - @Override - public void onFailure(Call call, IOException e) { - onError.accept(e); - } - - @Override - public void onResponse(Call call, okhttp3.Response response) { - try (ResponseBody body = response.body()) { - onResponse.accept( - new Response() { - @Override - public int statusCode() { - return response.code(); - } - - @Override - public String statusMessage() { - return response.message(); + InstrumentationUtil.suppressInstrumentation( + () -> + client + .newCall(requestBuilder.build()) + .enqueue( + new Callback() { + @Override + public void onFailure(Call call, IOException e) { + onError.accept(e); + } + + @Override + public void onResponse(Call call, okhttp3.Response response) { + try (ResponseBody body = response.body()) { + onResponse.accept( + new Response() { + @Nullable private byte[] bodyBytes; + + @Override + public int statusCode() { + return response.code(); + } + + @Override + public String statusMessage() { + return response.message(); + } + + @Override + public byte[] responseBody() throws IOException { + if (bodyBytes == null) { + bodyBytes = body.bytes(); + } + return bodyBytes; + } + }); } - - @Override - public byte[] responseBody() throws IOException { - return body.bytes(); - } - }); - } - } - }); + } + })); } @Override public CompletableResultCode shutdown() { client.dispatcher().cancelAll(); - client.dispatcher().executorService().shutdownNow(); + if (managedExecutor) { + client.dispatcher().executorService().shutdownNow(); + } client.connectionPool().evictAll(); return CompletableResultCode.ofSuccess(); } @@ -149,13 +189,15 @@ static boolean isRetryable(okhttp3.Response response) { private static class RawRequestBody extends RequestBody { - private final Consumer marshaler; + private final Marshaler marshaler; + private final boolean exportAsJson; private final int contentLength; private final MediaType mediaType; private RawRequestBody( - Consumer marshaler, int contentLength, MediaType mediaType) { + Marshaler marshaler, boolean exportAsJson, int contentLength, MediaType mediaType) { this.marshaler = marshaler; + this.exportAsJson = exportAsJson; this.contentLength = contentLength; this.mediaType = mediaType; } @@ -171,15 +213,21 @@ public MediaType contentType() { } @Override - public void writeTo(BufferedSink 
bufferedSink) { - marshaler.accept(bufferedSink.outputStream()); + public void writeTo(BufferedSink bufferedSink) throws IOException { + if (exportAsJson) { + marshaler.writeJsonTo(bufferedSink.outputStream()); + } else { + marshaler.writeBinaryTo(bufferedSink.outputStream()); + } } } - private static class GzipRequestBody extends RequestBody { + private static class CompressedRequestBody extends RequestBody { + private final Compressor compressor; private final RequestBody requestBody; - private GzipRequestBody(RequestBody requestBody) { + private CompressedRequestBody(Compressor compressor, RequestBody requestBody) { + this.compressor = compressor; this.requestBody = requestBody; } @@ -195,9 +243,10 @@ public long contentLength() { @Override public void writeTo(BufferedSink bufferedSink) throws IOException { - BufferedSink gzipSink = Okio.buffer(new GzipSink(bufferedSink)); - requestBody.writeTo(gzipSink); - gzipSink.close(); + BufferedSink compressedSink = + Okio.buffer(Okio.sink(compressor.compress(bufferedSink.outputStream()))); + requestBody.writeTo(compressedSink); + compressedSink.close(); } } } diff --git a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSenderProvider.java b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSenderProvider.java index edf5e9cf45e..8c7c3aa0f4b 100644 --- a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSenderProvider.java +++ b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSenderProvider.java @@ -5,15 +5,9 @@ package io.opentelemetry.exporter.sender.okhttp.internal; -import io.opentelemetry.exporter.internal.auth.Authenticator; import io.opentelemetry.exporter.internal.http.HttpSender; +import io.opentelemetry.exporter.internal.http.HttpSenderConfig; import io.opentelemetry.exporter.internal.http.HttpSenderProvider; -import io.opentelemetry.sdk.common.export.RetryPolicy; -import java.util.Map; -import java.util.function.Supplier; -import javax.net.ssl.SSLContext; -import javax.net.ssl.X509TrustManager; -import org.jetbrains.annotations.Nullable; /** * {@link HttpSender} SPI implementation for {@link OkHttpHttpSender}. 
@@ -24,25 +18,19 @@ public final class OkHttpHttpSenderProvider implements HttpSenderProvider { @Override - public HttpSender createSender( - String endpoint, - boolean compressionEnabled, - String contentType, - long timeoutNanos, - Supplier> headerSupplier, - @Nullable Authenticator authenticator, - @Nullable RetryPolicy retryPolicy, - @Nullable SSLContext sslContext, - @Nullable X509TrustManager trustManager) { + public HttpSender createSender(HttpSenderConfig httpSenderConfig) { return new OkHttpHttpSender( - endpoint, - compressionEnabled, - contentType, - timeoutNanos, - headerSupplier, - authenticator, - retryPolicy, - sslContext, - trustManager); + httpSenderConfig.getEndpoint(), + httpSenderConfig.getCompressor(), + httpSenderConfig.getExportAsJson(), + httpSenderConfig.getContentType(), + httpSenderConfig.getTimeoutNanos(), + httpSenderConfig.getConnectTimeoutNanos(), + httpSenderConfig.getHeadersSupplier(), + httpSenderConfig.getProxyOptions(), + httpSenderConfig.getRetryPolicy(), + httpSenderConfig.getSslContext(), + httpSenderConfig.getTrustManager(), + httpSenderConfig.getExecutorService()); } } diff --git a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpUtil.java b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpUtil.java index 8aef6b7c21a..b641d1bad05 100644 --- a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpUtil.java +++ b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpUtil.java @@ -18,6 +18,13 @@ * at any time. */ public final class OkHttpUtil { + @SuppressWarnings("NonFinalStaticField") + private static boolean propagateContextForTestingInDispatcher = false; + + public static void setPropagateContextForTestingInDispatcher( + boolean propagateContextForTestingInDispatcher) { + OkHttpUtil.propagateContextForTestingInDispatcher = propagateContextForTestingInDispatcher; + } /** Returns a {@link Dispatcher} using daemon threads, otherwise matching the OkHttp default. 
*/ public static Dispatcher newDispatcher() { @@ -28,7 +35,7 @@ public static Dispatcher newDispatcher() { 60, TimeUnit.SECONDS, new SynchronousQueue<>(), - new DaemonThreadFactory("okhttp-dispatch"))); + new DaemonThreadFactory("okhttp-dispatch", propagateContextForTestingInDispatcher))); } private OkHttpUtil() {} diff --git a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/RetryInterceptor.java b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/RetryInterceptor.java index ee7d5fc9177..988c8277c26 100644 --- a/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/RetryInterceptor.java +++ b/exporters/sender/okhttp/src/main/java/io/opentelemetry/exporter/sender/okhttp/internal/RetryInterceptor.java @@ -5,13 +5,22 @@ package io.opentelemetry.exporter.sender.okhttp.internal; +import static java.util.stream.Collectors.joining; + import io.opentelemetry.sdk.common.export.RetryPolicy; import java.io.IOException; +import java.net.ConnectException; +import java.net.SocketException; import java.net.SocketTimeoutException; -import java.util.Locale; +import java.net.UnknownHostException; +import java.util.StringJoiner; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import java.util.function.Function; +import java.util.function.Predicate; +import java.util.function.Supplier; +import java.util.logging.Level; +import java.util.logging.Logger; import okhttp3.Interceptor; import okhttp3.Response; @@ -23,34 +32,38 @@ */ public final class RetryInterceptor implements Interceptor { + private static final Logger logger = Logger.getLogger(RetryInterceptor.class.getName()); + private final RetryPolicy retryPolicy; private final Function isRetryable; - private final Function isRetryableException; + private final Predicate retryExceptionPredicate; private final Sleeper sleeper; - private final BoundedLongGenerator randomLong; + private final Supplier randomJitter; /** Constructs a new retrier. */ public RetryInterceptor(RetryPolicy retryPolicy, Function isRetryable) { this( retryPolicy, isRetryable, - RetryInterceptor::isRetryableException, + retryPolicy.getRetryExceptionPredicate() == null + ? 
RetryInterceptor::isRetryableException + : retryPolicy.getRetryExceptionPredicate(), TimeUnit.NANOSECONDS::sleep, - bound -> ThreadLocalRandom.current().nextLong(bound)); + () -> ThreadLocalRandom.current().nextDouble(0.8d, 1.2d)); } // Visible for testing RetryInterceptor( RetryPolicy retryPolicy, Function isRetryable, - Function isRetryableException, + Predicate retryExceptionPredicate, Sleeper sleeper, - BoundedLongGenerator randomLong) { + Supplier randomJitter) { this.retryPolicy = retryPolicy; this.isRetryable = isRetryable; - this.isRetryableException = isRetryableException; + this.retryExceptionPredicate = retryExceptionPredicate; this.sleeper = sleeper; - this.randomLong = randomLong; + this.randomJitter = randomJitter; } @Override @@ -63,9 +76,10 @@ public Response intercept(Chain chain) throws IOException { if (attempt > 0) { // Compute and sleep for backoff // https://github.com/grpc/proposal/blob/master/A6-client-retries.md#exponential-backoff - long upperBoundNanos = Math.min(nextBackoffNanos, retryPolicy.getMaxBackoff().toNanos()); - long backoffNanos = randomLong.get(upperBoundNanos); - nextBackoffNanos = (long) (nextBackoffNanos * retryPolicy.getBackoffMultiplier()); + long currentBackoffNanos = + Math.min(nextBackoffNanos, retryPolicy.getMaxBackoff().toNanos()); + long backoffNanos = (long) (randomJitter.get() * currentBackoffNanos); + nextBackoffNanos = (long) (currentBackoffNanos * retryPolicy.getBackoffMultiplier()); try { sleeper.sleep(backoffNanos); } catch (InterruptedException e) { @@ -76,21 +90,47 @@ public Response intercept(Chain chain) throws IOException { if (response != null) { response.close(); } + exception = null; } - - attempt++; try { response = chain.proceed(chain.request()); + if (response != null) { + boolean retryable = Boolean.TRUE.equals(isRetryable.apply(response)); + if (logger.isLoggable(Level.FINER)) { + logger.log( + Level.FINER, + "Attempt " + + attempt + + " returned " + + (retryable ? "retryable" : "non-retryable") + + " response: " + + responseStringRepresentation(response)); + } + if (!retryable) { + return response; + } + } else { + throw new NullPointerException("response cannot be null."); + } } catch (IOException e) { exception = e; + response = null; + boolean retryable = retryExceptionPredicate.test(exception); + if (logger.isLoggable(Level.FINER)) { + logger.log( + Level.FINER, + "Attempt " + + attempt + + " failed with " + + (retryable ? 
"retryable" : "non-retryable") + + " exception", + exception); + } + if (!retryable) { + throw exception; + } } - if (response != null && !Boolean.TRUE.equals(isRetryable.apply(response))) { - return response; - } - if (exception != null && !Boolean.TRUE.equals(isRetryableException.apply(exception))) { - throw exception; - } - } while (attempt < retryPolicy.getMaxAttempts()); + } while (++attempt < retryPolicy.getMaxAttempts()); if (response != null) { return response; @@ -98,20 +138,39 @@ public Response intercept(Chain chain) throws IOException { throw exception; } + private static String responseStringRepresentation(Response response) { + StringJoiner joiner = new StringJoiner(",", "Response{", "}"); + joiner.add("code=" + response.code()); + joiner.add( + "headers=" + + response.headers().toMultimap().entrySet().stream() + .map(entry -> entry.getKey() + "=" + String.join(",", entry.getValue())) + .collect(joining(",", "[", "]"))); + return joiner.toString(); + } + // Visible for testing - static boolean isRetryableException(IOException e) { - if (!(e instanceof SocketTimeoutException)) { - return false; - } - String message = e.getMessage(); - // Connect timeouts can produce SocketTimeoutExceptions with no message, or with "connect timed - // out" - return message == null || message.toLowerCase(Locale.ROOT).contains("connect timed out"); + boolean shouldRetryOnException(IOException e) { + return retryExceptionPredicate.test(e); } // Visible for testing - interface BoundedLongGenerator { - long get(long bound); + static boolean isRetryableException(IOException e) { + // Known retryable SocketTimeoutException messages: null, "connect timed out", "timeout" + // Known retryable ConnectTimeout messages: "Failed to connect to + // localhost/[0:0:0:0:0:0:0:1]:62611" + // Known retryable UnknownHostException messages: "xxxxxx.com" + // Known retryable SocketException: Socket closed + if (e instanceof SocketTimeoutException) { + return true; + } else if (e instanceof ConnectException) { + return true; + } else if (e instanceof UnknownHostException) { + return true; + } else if (e instanceof SocketException) { + return true; + } + return false; } // Visible for testing diff --git a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/AbstractOkHttpSuppressionTest.java b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/AbstractOkHttpSuppressionTest.java new file mode 100644 index 00000000000..4787bb8a610 --- /dev/null +++ b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/AbstractOkHttpSuppressionTest.java @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.sender.okhttp.internal; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.opentelemetry.api.internal.InstrumentationUtil; +import io.opentelemetry.context.Context; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +abstract class AbstractOkHttpSuppressionTest { + + @BeforeEach + void setUp() { + OkHttpUtil.setPropagateContextForTestingInDispatcher(true); + } + + @AfterEach + void tearDown() { + OkHttpUtil.setPropagateContextForTestingInDispatcher(false); + } + + @Test + void testSuppressInstrumentation() throws 
InterruptedException { + CountDownLatch latch = new CountDownLatch(1); + AtomicBoolean suppressInstrumentation = new AtomicBoolean(false); + + Runnable onSuccess = Assertions::fail; + Runnable onFailure = + () -> { + suppressInstrumentation.set( + InstrumentationUtil.shouldSuppressInstrumentation(Context.current())); + latch.countDown(); + }; + + send(getSender(), onSuccess, onFailure); + + latch.await(); + + assertTrue(suppressInstrumentation.get()); + } + + abstract void send(T sender, Runnable onSuccess, Runnable onFailure); + + private T getSender() { + return createSender("https://none"); + } + + abstract T createSender(String endpoint); +} diff --git a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/AuthenticatingExporterTest.java b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/AuthenticatingExporterTest.java deleted file mode 100644 index e1c948f550c..00000000000 --- a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/AuthenticatingExporterTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.sender.okhttp.internal; - -import static org.assertj.core.api.Assertions.assertThat; - -import com.linecorp.armeria.common.HttpResponse; -import com.linecorp.armeria.common.HttpStatus; -import com.linecorp.armeria.testing.junit5.server.mock.MockWebServerExtension; -import io.opentelemetry.exporter.internal.http.HttpExporter; -import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import io.opentelemetry.exporter.internal.marshal.Marshaler; -import io.opentelemetry.exporter.internal.marshal.Serializer; -import io.opentelemetry.internal.testing.slf4j.SuppressLogger; -import io.opentelemetry.sdk.common.CompletableResultCode; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.api.extension.RegisterExtension; -import org.mockito.junit.jupiter.MockitoExtension; - -/** Test Authentication in an exporter. */ -@ExtendWith(MockitoExtension.class) -class AuthenticatingExporterTest { - - @RegisterExtension static final MockWebServerExtension server = new MockWebServerExtension(); - private final Marshaler marshaler = - new Marshaler() { - @Override - public int getBinarySerializedSize() { - return 0; - } - - @Override - protected void writeTo(Serializer output) {} - }; - - @Test - void export() { - HttpExporter exporter = - new HttpExporterBuilder<>("otlp", "test", server.httpUri().toASCIIString()) - .setAuthenticator( - () -> { - Map headers = new HashMap<>(); - headers.put("Authorization", "auth"); - return headers; - }) - .build(); - - server.enqueue(HttpResponse.of(HttpStatus.UNAUTHORIZED)); - server.enqueue(HttpResponse.of(HttpStatus.OK)); - - CompletableResultCode result = exporter.export(marshaler, 0); - - assertThat(server.takeRequest().request().headers().get("Authorization")).isNull(); - assertThat(server.takeRequest().request().headers().get("Authorization")).isEqualTo("auth"); - - result.join(1, TimeUnit.MINUTES); - assertThat(result.isSuccess()).isTrue(); - } - - /** Ensure that exporter gives up if a request is always considered UNAUTHORIZED. 
*/ - @Test - @SuppressLogger(HttpExporter.class) - void export_giveup() { - HttpExporter exporter = - new HttpExporterBuilder<>("otlp", "test", server.httpUri().toASCIIString()) - .setAuthenticator( - () -> { - server.enqueue(HttpResponse.of(HttpStatus.UNAUTHORIZED)); - return Collections.emptyMap(); - }) - .build(); - server.enqueue(HttpResponse.of(HttpStatus.UNAUTHORIZED)); - assertThat(exporter.export(marshaler, 0).join(1, TimeUnit.MINUTES).isSuccess()).isFalse(); - } -} diff --git a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/HttpExporterBuilderTest.java b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/HttpExporterBuilderTest.java deleted file mode 100644 index b7b80453be0..00000000000 --- a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/HttpExporterBuilderTest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.exporter.sender.okhttp.internal; - -import static org.assertj.core.api.Assertions.assertThat; - -import io.opentelemetry.exporter.internal.http.HttpExporter; -import io.opentelemetry.exporter.internal.http.HttpExporterBuilder; -import io.opentelemetry.exporter.internal.marshal.Marshaler; -import org.junit.jupiter.api.Test; - -class HttpExporterBuilderTest { - - private final HttpExporterBuilder builder = - new HttpExporterBuilder<>("otlp", "span", "http://localhost:4318/v1/traces"); - - @Test - void compressionDefault() { - HttpExporter exporter = builder.build(); - try { - assertThat(exporter) - .isInstanceOfSatisfying( - HttpExporter.class, - otlp -> - assertThat(otlp) - .extracting("httpSender") - .isInstanceOf(OkHttpHttpSender.class) - .extracting("compressionEnabled") - .isEqualTo(false)); - } finally { - exporter.shutdown(); - } - } - - @Test - void compressionNone() { - HttpExporter exporter = builder.setCompression("none").build(); - try { - assertThat(exporter) - .isInstanceOfSatisfying( - HttpExporter.class, - otlp -> - assertThat(otlp) - .extracting("httpSender") - .isInstanceOf(OkHttpHttpSender.class) - .extracting("compressionEnabled") - .isEqualTo(false)); - } finally { - exporter.shutdown(); - } - } - - @Test - void compressionGzip() { - HttpExporter exporter = builder.setCompression("gzip").build(); - try { - assertThat(exporter) - .isInstanceOfSatisfying( - HttpExporter.class, - otlp -> - assertThat(otlp) - .extracting("httpSender") - .isInstanceOf(OkHttpHttpSender.class) - .extracting("compressionEnabled") - .isEqualTo(true)); - } finally { - exporter.shutdown(); - } - } - - @Test - void compressionEnabledAndDisabled() { - HttpExporter exporter = - builder.setCompression("gzip").setCompression("none").build(); - try { - assertThat(exporter) - .isInstanceOfSatisfying( - HttpExporter.class, - otlp -> - assertThat(otlp) - .extracting("httpSender") - .isInstanceOf(OkHttpHttpSender.class) - .extracting("compressionEnabled") - .isEqualTo(false)); - } finally { - exporter.shutdown(); - } - } -} diff --git a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSenderTest.java b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSenderTest.java new file mode 100644 index 00000000000..c1a61a8aea5 --- /dev/null +++ b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSenderTest.java @@ -0,0 +1,59 @@ +/* + * Copyright 
The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.sender.okhttp.internal; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.opentelemetry.exporter.internal.RetryUtil; +import io.opentelemetry.exporter.internal.grpc.GrpcExporterUtil; +import java.util.Set; +import okhttp3.MediaType; +import okhttp3.Protocol; +import okhttp3.Request; +import okhttp3.Response; +import okhttp3.ResponseBody; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +class OkHttpGrpcSenderTest { + + private static final String GRPC_STATUS = "grpc-status"; + private static final MediaType TEXT_PLAIN = MediaType.get("text/plain"); + + static Set provideRetryableGrpcStatusCodes() { + return RetryUtil.retryableGrpcStatusCodes(); + } + + @ParameterizedTest(name = "isRetryable should return true for GRPC status code: {0}") + @MethodSource("provideRetryableGrpcStatusCodes") + void isRetryable_RetryableGrpcStatus(String retryableGrpcStatus) { + Response response = createResponse(503, retryableGrpcStatus, "Retryable"); + boolean isRetryable = OkHttpGrpcSender.isRetryable(response); + assertTrue(isRetryable); + } + + @Test + void isRetryable_NonRetryableGrpcStatus() { + String nonRetryableGrpcStatus = + Integer.valueOf(GrpcExporterUtil.GRPC_STATUS_UNKNOWN).toString(); // INVALID_ARGUMENT + Response response = createResponse(503, nonRetryableGrpcStatus, "Non-retryable"); + boolean isRetryable = OkHttpGrpcSender.isRetryable(response); + assertFalse(isRetryable); + } + + private static Response createResponse(int httpCode, String grpcStatus, String message) { + return new Response.Builder() + .request(new Request.Builder().url("http://localhost/").build()) + .protocol(Protocol.HTTP_2) + .code(httpCode) + .body(ResponseBody.create("body", TEXT_PLAIN)) + .message(message) + .header(GRPC_STATUS, grpcStatus) + .build(); + } +} diff --git a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSuppressionTest.java b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSuppressionTest.java new file mode 100644 index 00000000000..39457f4e2de --- /dev/null +++ b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpGrpcSuppressionTest.java @@ -0,0 +1,36 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.sender.okhttp.internal; + +import io.opentelemetry.exporter.internal.marshal.MarshalerWithSize; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import java.util.Collections; + +class OkHttpGrpcSuppressionTest + extends AbstractOkHttpSuppressionTest< + OkHttpGrpcSender> { + + @Override + void send(OkHttpGrpcSender sender, Runnable onSuccess, Runnable onFailure) { + sender.send(new DummyMarshaler(), grpcResponse -> {}, throwable -> onFailure.run()); + } + + @Override + OkHttpGrpcSender createSender(String endpoint) { + return new OkHttpGrpcSender<>( + "https://localhost", null, 10L, 10L, Collections::emptyMap, null, null, null, null); + } + + protected static class DummyMarshaler extends MarshalerWithSize { + + protected DummyMarshaler() { + super(0); + } + + @Override + protected void writeTo(Serializer output) {} + } +} diff --git 
a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSuppressionTest.java b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSuppressionTest.java new file mode 100644 index 00000000000..38686c36526 --- /dev/null +++ b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/OkHttpHttpSuppressionTest.java @@ -0,0 +1,52 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.sender.okhttp.internal; + +import io.opentelemetry.exporter.internal.marshal.Marshaler; +import io.opentelemetry.exporter.internal.marshal.ProtoFieldInfo; +import io.opentelemetry.exporter.internal.marshal.Serializer; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Collections; + +class OkHttpHttpSuppressionTest extends AbstractOkHttpSuppressionTest { + + @Override + void send(OkHttpHttpSender sender, Runnable onSuccess, Runnable onFailure) { + byte[] content = "A".getBytes(StandardCharsets.UTF_8); + Marshaler marshaler = + new Marshaler() { + @Override + public int getBinarySerializedSize() { + return content.length; + } + + @Override + protected void writeTo(Serializer output) throws IOException { + output.serializeBytes(ProtoFieldInfo.create(1, 1, "field"), content); + } + }; + sender.send( + marshaler, content.length, (response) -> onSuccess.run(), (error) -> onFailure.run()); + } + + @Override + OkHttpHttpSender createSender(String endpoint) { + return new OkHttpHttpSender( + endpoint, + null, + false, + "text/plain", + 10L, + 10L, + Collections::emptyMap, + null, + null, + null, + null, + null); + } +} diff --git a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/RetryInterceptorTest.java b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/RetryInterceptorTest.java index f4b644a36f3..ca2ad7d5966 100644 --- a/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/RetryInterceptorTest.java +++ b/exporters/sender/okhttp/src/test/java/io/opentelemetry/exporter/sender/okhttp/internal/RetryInterceptorTest.java @@ -9,8 +9,9 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; @@ -23,10 +24,20 @@ import com.linecorp.armeria.testing.junit5.server.mock.MockWebServerExtension; import io.opentelemetry.sdk.common.export.RetryPolicy; import java.io.IOException; +import java.net.ConnectException; +import java.net.HttpRetryException; +import java.net.ServerSocket; +import java.net.SocketException; import java.net.SocketTimeoutException; +import java.net.UnknownHostException; import java.time.Duration; import java.util.concurrent.TimeUnit; -import java.util.function.Function; +import java.util.function.Predicate; +import java.util.function.Supplier; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Stream; +import okhttp3.Interceptor; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.Response; @@ -35,9 +46,13 @@ import 
org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mock; +import org.mockito.invocation.InvocationOnMock; import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.stubbing.Answer; @ExtendWith(MockitoExtension.class) class RetryInterceptorTest { @@ -45,38 +60,60 @@ class RetryInterceptorTest { @RegisterExtension static final MockWebServerExtension server = new MockWebServerExtension(); @Mock private RetryInterceptor.Sleeper sleeper; - @Mock private RetryInterceptor.BoundedLongGenerator random; - private Function isRetryableException; + @Mock private Supplier random; + private Predicate retryExceptionPredicate; private RetryInterceptor retrier; private OkHttpClient client; @BeforeEach void setUp() { - // Note: cannot replace this with lambda or method reference because we need to spy on it - isRetryableException = + Logger logger = java.util.logging.Logger.getLogger(RetryInterceptor.class.getName()); + logger.setLevel(Level.FINER); + retryExceptionPredicate = spy( - new Function() { + new Predicate() { @Override - public Boolean apply(IOException exception) { - return RetryInterceptor.isRetryableException(exception); + public boolean test(IOException e) { + return RetryInterceptor.isRetryableException(e) + || (e instanceof HttpRetryException + && e.getMessage().contains("timeout retry")); } }); + + RetryPolicy retryPolicy = + RetryPolicy.builder() + .setBackoffMultiplier(1.6) + .setInitialBackoff(Duration.ofSeconds(1)) + .setMaxBackoff(Duration.ofSeconds(2)) + .setMaxAttempts(5) + .setRetryExceptionPredicate(retryExceptionPredicate) + .build(); + retrier = new RetryInterceptor( - RetryPolicy.builder() - .setBackoffMultiplier(1.6) - .setInitialBackoff(Duration.ofSeconds(1)) - .setMaxBackoff(Duration.ofSeconds(2)) - .setMaxAttempts(5) - .build(), - r -> !r.isSuccessful(), - isRetryableException, - sleeper, - random); + retryPolicy, r -> !r.isSuccessful(), retryExceptionPredicate, sleeper, random); client = new OkHttpClient.Builder().addInterceptor(retrier).build(); } + @Test + void noRetryOnNullResponse() throws IOException { + Interceptor.Chain chain = mock(Interceptor.Chain.class); + when(chain.proceed(any())).thenReturn(null); + when(chain.request()) + .thenReturn(new Request.Builder().url(server.httpUri().toString()).build()); + assertThatThrownBy( + () -> { + retrier.intercept(chain); + }) + .isInstanceOf(NullPointerException.class) + .hasMessage("response cannot be null."); + + verifyNoInteractions(retryExceptionPredicate); + verifyNoInteractions(random); + verifyNoInteractions(sleeper); + } + @Test void noRetry() throws Exception { server.enqueue(HttpResponse.of(HttpStatus.OK)); @@ -95,17 +132,8 @@ void noRetry() throws Exception { @ValueSource(ints = {5, 6}) void backsOff(int attempts) throws Exception { succeedOnAttempt(attempts); - - // Will backoff 4 times - when(random.get((long) (TimeUnit.SECONDS.toNanos(1) * Math.pow(1.6, 0)))).thenReturn(100L); - when(random.get((long) (TimeUnit.SECONDS.toNanos(1) * Math.pow(1.6, 1)))).thenReturn(50L); - // Capped - when(random.get(TimeUnit.SECONDS.toNanos(2))).thenReturn(500L).thenReturn(510L); - - doNothing().when(sleeper).sleep(100); - doNothing().when(sleeper).sleep(50); - doNothing().when(sleeper).sleep(500); - doNothing().when(sleeper).sleep(510); 
+ when(random.get()).thenReturn(1.0d); + doNothing().when(sleeper).sleep(anyLong()); try (Response response = sendRequest()) { if (attempts <= 5) { @@ -125,16 +153,26 @@ void interrupted() throws Exception { succeedOnAttempt(5); // Backs off twice, second is interrupted - when(random.get((long) (TimeUnit.SECONDS.toNanos(1) * Math.pow(1.6, 0)))).thenReturn(100L); - when(random.get((long) (TimeUnit.SECONDS.toNanos(1) * Math.pow(1.6, 1)))).thenReturn(50L); + when(random.get()).thenReturn(1.0d).thenReturn(1.0d); + doAnswer( + new Answer() { + int counter = 0; - doNothing().when(sleeper).sleep(100); - doThrow(new InterruptedException()).when(sleeper).sleep(50); + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + if (counter++ == 1) { + throw new InterruptedException(); + } + return null; + } + }) + .when(sleeper) + .sleep(anyLong()); try (Response response = sendRequest()) { assertThat(response.isSuccessful()).isFalse(); } - + verify(sleeper, times(2)).sleep(anyLong()); for (int i = 0; i < 2; i++) { server.takeRequest(0, TimeUnit.NANOSECONDS); } @@ -143,7 +181,7 @@ void interrupted() throws Exception { @Test void connectTimeout() throws Exception { client = connectTimeoutClient(); - when(random.get(anyLong())).thenReturn(1L); + when(random.get()).thenReturn(1.0d); doNothing().when(sleeper).sleep(anyLong()); // Connecting to a non-routable IP address to trigger connection error @@ -152,16 +190,44 @@ void connectTimeout() throws Exception { client.newCall(new Request.Builder().url("http://10.255.255.1").build()).execute()) .isInstanceOf(SocketTimeoutException.class); - verify(isRetryableException, times(5)).apply(any()); + verify(retryExceptionPredicate, times(5)).test(any()); + // Should retry maxAttempts, and sleep maxAttempts - 1 times + verify(sleeper, times(4)).sleep(anyLong()); + } + + @Test + void connectException() throws Exception { + client = connectTimeoutClient(); + when(random.get()).thenReturn(1.0d); + doNothing().when(sleeper).sleep(anyLong()); + + // Connecting to localhost on an unused port address to trigger java.net.ConnectException + int openPort = freePort(); + assertThatThrownBy( + () -> + client + .newCall(new Request.Builder().url("http://localhost:" + openPort).build()) + .execute()) + .isInstanceOfAny(ConnectException.class, SocketTimeoutException.class); + + verify(retryExceptionPredicate, times(5)).test(any()); // Should retry maxAttempts, and sleep maxAttempts - 1 times verify(sleeper, times(4)).sleep(anyLong()); } + private static int freePort() { + try (ServerSocket socket = new ServerSocket(0)) { + return socket.getLocalPort(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + @Test void nonRetryableException() throws InterruptedException { client = connectTimeoutClient(); - // Override isRetryableException so that no exception is retryable - when(isRetryableException.apply(any())).thenReturn(false); + // Override retryPredicate so that no exception is retryable + when(retryExceptionPredicate.test(any())).thenReturn(false); // Connecting to a non-routable IP address to trigger connection timeout assertThatThrownBy( @@ -169,7 +235,7 @@ void nonRetryableException() throws InterruptedException { client.newCall(new Request.Builder().url("http://10.255.255.1").build()).execute()) .isInstanceOf(SocketTimeoutException.class); - verify(isRetryableException, times(1)).apply(any()); + verify(retryExceptionPredicate, times(1)).test(any()); verify(sleeper, never()).sleep(anyLong()); } @@ -180,24 +246,61 @@ private 
OkHttpClient connectTimeoutClient() { .build(); } + @ParameterizedTest + @MethodSource("isRetryableExceptionArgs") + void isRetryableException(IOException exception, boolean expectedRetryResult) { + assertThat(retrier.shouldRetryOnException(exception)).isEqualTo(expectedRetryResult); + } + + private static Stream isRetryableExceptionArgs() { + return Stream.of( + // Should retry on SocketTimeoutExceptions + Arguments.of(new SocketTimeoutException("Connect timed out"), true), + Arguments.of(new SocketTimeoutException("connect timed out"), true), + Arguments.of(new SocketTimeoutException("timeout"), true), + Arguments.of(new SocketTimeoutException("Read timed out"), true), + Arguments.of(new SocketTimeoutException(), true), + // Should retry on UnknownHostExceptions + Arguments.of(new UnknownHostException("host"), true), + // Should retry on SocketException + Arguments.of(new SocketException("closed"), true), + // Should retry on ConnectException + Arguments.of( + new ConnectException("Failed to connect to localhost/[0:0:0:0:0:0:0:1]:62611"), true), + // Shouldn't retry other IOException + Arguments.of(new IOException("error"), false), + // Testing configured predicate + Arguments.of(new HttpRetryException("error", 400), false), + Arguments.of(new HttpRetryException("timeout retry", 400), true)); + } + @Test - void isRetryableException() { - // Should retry on connection timeouts, where error message is "Connect timed out" or "connect - // timed out" - assertThat( - RetryInterceptor.isRetryableException(new SocketTimeoutException("Connect timed out"))) - .isTrue(); + void isRetryableExceptionDefaultBehaviour() { + RetryInterceptor retryInterceptor = + new RetryInterceptor(RetryPolicy.getDefault(), OkHttpHttpSender::isRetryable); assertThat( - RetryInterceptor.isRetryableException(new SocketTimeoutException("connect timed out"))) + retryInterceptor.shouldRetryOnException( + new SocketTimeoutException("Connect timed out"))) .isTrue(); - // Shouldn't retry on read timeouts, where error message is "Read timed out" - assertThat(RetryInterceptor.isRetryableException(new SocketTimeoutException("Read timed out"))) + assertThat(retryInterceptor.shouldRetryOnException(new IOException("Connect timed out"))) .isFalse(); - // Shouldn't retry on write timeouts, where error message is "timeout", or other IOException - assertThat(RetryInterceptor.isRetryableException(new SocketTimeoutException("timeout"))) + } + + @Test + void isRetryableExceptionCustomRetryPredicate() { + RetryInterceptor retryInterceptor = + new RetryInterceptor( + RetryPolicy.builder() + .setRetryExceptionPredicate((IOException e) -> e.getMessage().equals("retry")) + .build(), + OkHttpHttpSender::isRetryable); + + assertThat(retryInterceptor.shouldRetryOnException(new IOException("some message"))).isFalse(); + assertThat(retryInterceptor.shouldRetryOnException(new IOException("retry"))).isTrue(); + assertThat( + retryInterceptor.shouldRetryOnException( + new SocketTimeoutException("Connect timed out"))) .isFalse(); - assertThat(RetryInterceptor.isRetryableException(new SocketTimeoutException())).isTrue(); - assertThat(RetryInterceptor.isRetryableException(new IOException("error"))).isFalse(); } private Response sendRequest() throws IOException { diff --git a/exporters/zipkin/build.gradle.kts b/exporters/zipkin/build.gradle.kts index ade51707932..1636cd7ace8 100644 --- a/exporters/zipkin/build.gradle.kts +++ b/exporters/zipkin/build.gradle.kts @@ -15,6 +15,7 @@ dependencies { implementation(project(":exporters:common")) 
implementation(project(":sdk-extensions:autoconfigure-spi")) + compileOnly(project(":api:incubator")) implementation("io.zipkin.reporter2:zipkin-sender-okhttp3") diff --git a/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/BytesEncoderAdapter.java b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/BytesEncoderAdapter.java new file mode 100644 index 00000000000..8605eacd936 --- /dev/null +++ b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/BytesEncoderAdapter.java @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.zipkin; + +import zipkin2.Span; +import zipkin2.reporter.BytesEncoder; +import zipkin2.reporter.Encoding; + +/** + * This supports the deprecated method {@link + * ZipkinSpanExporterBuilder#setEncoder(zipkin2.codec.BytesEncoder)}. + */ +final class BytesEncoderAdapter implements BytesEncoder { + private final zipkin2.codec.BytesEncoder delegate; + private final Encoding encoding; + + @SuppressWarnings("deprecation") // we have to use the deprecated thrift encoding to return it + BytesEncoderAdapter(zipkin2.codec.BytesEncoder delegate) { + this.delegate = delegate; + switch (delegate.encoding()) { + case JSON: + this.encoding = Encoding.JSON; + break; + case PROTO3: + this.encoding = Encoding.PROTO3; + break; + case THRIFT: + this.encoding = Encoding.THRIFT; + break; + default: + // Only possible if zipkin2 adds an encoding besides above, which is very unlikely. + throw new UnsupportedOperationException("unsupported encoding " + delegate.encoding()); + } + } + + @Override + public Encoding encoding() { + return encoding; + } + + @Override + public int sizeInBytes(Span span) { + return delegate.sizeInBytes(span); + } + + @Override + public byte[] encode(Span span) { + return delegate.encode(span); + } + + @Override + public String toString() { + return delegate.toString(); + } +} diff --git a/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/LocalInetAddressSupplier.java b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/LocalInetAddressSupplier.java index 1ddb324edb6..a5376eb88a9 100644 --- a/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/LocalInetAddressSupplier.java +++ b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/LocalInetAddressSupplier.java @@ -30,7 +30,7 @@ public InetAddress get() { return inetAddress; } - /** Logic borrowed from brave.internal.Platform.produceLocalEndpoint */ + /** Logic borrowed from brave.internal.Platform.produceLocalEndpoint. 
*/ @Nullable private static InetAddress findLocalIp() { try { diff --git a/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporter.java b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporter.java index 37d8c1fd022..a76d3177fd4 100644 --- a/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporter.java +++ b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporter.java @@ -5,6 +5,7 @@ package io.opentelemetry.exporter.zipkin; +import io.opentelemetry.api.internal.InstrumentationUtil; import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.exporter.internal.ExporterMetrics; import io.opentelemetry.sdk.common.CompletableResultCode; @@ -19,11 +20,10 @@ import java.util.function.Supplier; import java.util.logging.Level; import java.util.logging.Logger; -import zipkin2.Callback; import zipkin2.Span; -import zipkin2.codec.BytesEncoder; -import zipkin2.codec.Encoding; -import zipkin2.reporter.Sender; +import zipkin2.reporter.BytesEncoder; +import zipkin2.reporter.BytesMessageSender; +import zipkin2.reporter.Encoding; /** * This class was based on the encoder; - private final Sender sender; + private final BytesMessageSender sender; private final ExporterMetrics exporterMetrics; private final OtelToZipkinSpanTransformer transformer; @@ -48,7 +48,7 @@ public final class ZipkinSpanExporter implements SpanExporter { ZipkinSpanExporter( ZipkinSpanExporterBuilder builder, BytesEncoder encoder, - Sender sender, + BytesMessageSender sender, Supplier meterProviderSupplier, OtelToZipkinSpanTransformer transformer) { this.builder = builder; @@ -76,25 +76,20 @@ public CompletableResultCode export(Collection spanDataList) { encodedSpans.add(encoder.encode(zipkinSpan)); } - CompletableResultCode result = new CompletableResultCode(); - sender - .sendSpans(encodedSpans) - .enqueue( - new Callback() { - @Override - public void onSuccess(Void value) { - exporterMetrics.addSuccess(numItems); - result.succeed(); - } - - @Override - public void onError(Throwable t) { - exporterMetrics.addFailed(numItems); - logger.log(Level.WARNING, "Failed to export spans", t); - result.fail(); - } - }); - return result; + CompletableResultCode resultCode = new CompletableResultCode(); + InstrumentationUtil.suppressInstrumentation( + () -> { + try { + sender.send(encodedSpans); + exporterMetrics.addSuccess(numItems); + resultCode.succeed(); + } catch (IOException | RuntimeException e) { + exporterMetrics.addFailed(numItems); + logger.log(Level.WARNING, "Failed to export spans", e); + resultCode.fail(); + } + }); + return resultCode; } @Override diff --git a/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterBuilder.java b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterBuilder.java index 2691c700954..0f1fae95b9a 100644 --- a/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterBuilder.java +++ b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterBuilder.java @@ -17,45 +17,77 @@ import java.util.function.Supplier; import javax.annotation.Nullable; import zipkin2.Span; -import zipkin2.codec.BytesEncoder; -import zipkin2.codec.SpanBytesEncoder; -import zipkin2.reporter.Sender; +import zipkin2.reporter.BytesEncoder; +import zipkin2.reporter.BytesMessageSender; +import zipkin2.reporter.SpanBytesEncoder; import zipkin2.reporter.okhttp3.OkHttpSender; /** Builder class for {@link 
ZipkinSpanExporter}. */ public final class ZipkinSpanExporterBuilder { private BytesEncoder encoder = SpanBytesEncoder.JSON_V2; private Supplier localIpAddressSupplier = LocalInetAddressSupplier.getInstance(); - @Nullable private Sender sender; + @Nullable private BytesMessageSender sender; private String endpoint = ZipkinSpanExporter.DEFAULT_ENDPOINT; // compression is enabled by default, because this is the default of OkHttpSender, // which is created when no custom sender is set (see OkHttpSender.Builder) private boolean compressionEnabled = true; - private long readTimeoutMillis = TimeUnit.SECONDS.toMillis(10); + private int readTimeoutMillis = (int) TimeUnit.SECONDS.toMillis(10); private Supplier meterProviderSupplier = GlobalOpenTelemetry::getMeterProvider; /** * Sets the Zipkin sender. Implements the client side of the span transport. An {@link * OkHttpSender} is a good default. * - *
<p>
The {@link Sender#close()} method will be called when the exporter is shut down. + *
<p>
The {@link BytesMessageSender#close()} method will be called when the exporter is shut down. * * @param sender the Zipkin sender implementation. * @return this. + * @deprecated Use {@link #setSender(BytesMessageSender)} instead. */ - public ZipkinSpanExporterBuilder setSender(Sender sender) { + @Deprecated + public ZipkinSpanExporterBuilder setSender(zipkin2.reporter.Sender sender) { + return setSender((BytesMessageSender) sender); + } + + /** + * Sets the Zipkin sender. Implements the client side of the span transport. An {@link + * OkHttpSender} is a good default. + * + *
<p>
The {@link BytesMessageSender#close()} method will be called when the exporter is shut down. + * + * @param sender the Zipkin sender implementation. + * @return this. + * @since 1.35.0 + */ + public ZipkinSpanExporterBuilder setSender(BytesMessageSender sender) { requireNonNull(sender, "sender"); this.sender = sender; return this; } /** - * Sets the {@link BytesEncoder}, which controls the format used by the {@link Sender}. Defaults - * to the {@link SpanBytesEncoder#JSON_V2}. + * Sets the {@link zipkin2.codec.BytesEncoder}, which controls the format used by the {@link + * BytesMessageSender}. Defaults to the {@link zipkin2.codec.SpanBytesEncoder#JSON_V2}. + * + * @param encoder the {@code BytesEncoder} to use. + * @return this. + * @see zipkin2.codec.SpanBytesEncoder + * @deprecated Use {@link #setEncoder(BytesEncoder)} instead. + */ + @Deprecated + public ZipkinSpanExporterBuilder setEncoder(zipkin2.codec.BytesEncoder encoder) { + requireNonNull(encoder, "encoder"); + return setEncoder(new BytesEncoderAdapter(encoder)); + } + + /** + * Sets the {@link BytesEncoder}, which controls the format used by the {@link + * BytesMessageSender}. Defaults to the {@link SpanBytesEncoder#JSON_V2}. * * @param encoder the {@code BytesEncoder} to use. * @return this. * @see SpanBytesEncoder + * @since 1.35.0 */ public ZipkinSpanExporterBuilder setEncoder(BytesEncoder encoder) { requireNonNull(encoder, "encoder"); @@ -99,7 +131,7 @@ public ZipkinSpanExporterBuilder setEndpoint(String endpoint) { * supported compression methods include "gzip" and "none". * *
<p>
The compression method is ignored when a custom Zipkin sender is set via {@link - * #setSender(Sender)}. + * #setSender(BytesMessageSender)}. * * @param compressionMethod The compression method, ex. "gzip". * @return this. @@ -124,7 +156,8 @@ public ZipkinSpanExporterBuilder setCompression(String compressionMethod) { public ZipkinSpanExporterBuilder setReadTimeout(long timeout, TimeUnit unit) { requireNonNull(unit, "unit"); checkArgument(timeout >= 0, "timeout must be non-negative"); - this.readTimeoutMillis = unit.toMillis(timeout); + long timeoutMillis = timeout == 0 ? Long.MAX_VALUE : unit.toMillis(timeout); + this.readTimeoutMillis = (int) Math.min(timeoutMillis, Integer.MAX_VALUE); return this; } @@ -174,13 +207,13 @@ String toString(boolean includePrefixAndSuffix) { * @return a {@code ZipkinSpanExporter}. */ public ZipkinSpanExporter build() { - Sender sender = this.sender; + BytesMessageSender sender = this.sender; if (sender == null) { sender = OkHttpSender.newBuilder() .endpoint(endpoint) .compressionEnabled(compressionEnabled) - .readTimeout((int) readTimeoutMillis) + .readTimeout(readTimeoutMillis) .build(); } OtelToZipkinSpanTransformer transformer = diff --git a/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/internal/ZipkinSpanExporterComponentProvider.java b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/internal/ZipkinSpanExporterComponentProvider.java new file mode 100644 index 00000000000..42c43eef14a --- /dev/null +++ b/exporters/zipkin/src/main/java/io/opentelemetry/exporter/zipkin/internal/ZipkinSpanExporterComponentProvider.java @@ -0,0 +1,48 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.zipkin.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.exporter.zipkin.ZipkinSpanExporter; +import io.opentelemetry.exporter.zipkin.ZipkinSpanExporterBuilder; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.time.Duration; + +/** + * Declarative configuration SPI implementation for {@link ZipkinSpanExporter}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public class ZipkinSpanExporterComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return SpanExporter.class; + } + + @Override + public String getName() { + return "zipkin"; + } + + @Override + public SpanExporter create(DeclarativeConfigProperties config) { + ZipkinSpanExporterBuilder builder = ZipkinSpanExporter.builder(); + + String endpoint = config.getString("endpoint"); + if (endpoint != null) { + builder.setEndpoint(endpoint); + } + + Long timeoutMs = config.getLong("timeout"); + if (timeoutMs != null) { + builder.setReadTimeout(Duration.ofMillis(timeoutMs)); + } + + return builder.build(); + } +} diff --git a/exporters/zipkin/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/exporters/zipkin/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider new file mode 100644 index 00000000000..08330b52dad --- /dev/null +++ b/exporters/zipkin/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider @@ -0,0 +1 @@ +io.opentelemetry.exporter.zipkin.internal.ZipkinSpanExporterComponentProvider diff --git a/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/BytesEncoderAdapterTest.java b/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/BytesEncoderAdapterTest.java new file mode 100644 index 00000000000..24b16eb3a06 --- /dev/null +++ b/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/BytesEncoderAdapterTest.java @@ -0,0 +1,70 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.exporter.zipkin; + +import static io.opentelemetry.exporter.zipkin.ZipkinTestUtil.PARENT_SPAN_ID; +import static io.opentelemetry.exporter.zipkin.ZipkinTestUtil.SPAN_ID; +import static io.opentelemetry.exporter.zipkin.ZipkinTestUtil.TRACE_ID; +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Test; +import zipkin2.Endpoint; +import zipkin2.Span; +import zipkin2.reporter.Encoding; +import zipkin2.reporter.SpanBytesEncoder; + +class BytesEncoderAdapterTest { + + /** Contains {@link Span#localEndpoint()} to ensure would be encoded differently. 
*/ + private final Span testSpan = + Span.newBuilder() + .traceId(TRACE_ID) + .parentId(PARENT_SPAN_ID) + .id(SPAN_ID) + .localEndpoint(Endpoint.newBuilder().serviceName("test").build()) + .build(); + + @Test + void testJsonV2() { + BytesEncoderAdapter adapter = new BytesEncoderAdapter(zipkin2.codec.SpanBytesEncoder.JSON_V2); + assertThat(adapter.encoding()).isEqualTo(Encoding.JSON); + assertThat(adapter.encode(testSpan)).isEqualTo(SpanBytesEncoder.JSON_V2.encode(testSpan)); + assertThat(adapter.sizeInBytes(testSpan)) + .isEqualTo(SpanBytesEncoder.JSON_V2.sizeInBytes(testSpan)); + assertThat(adapter).hasToString(SpanBytesEncoder.JSON_V2.toString()); + } + + @Test + void testProtobuf() { + BytesEncoderAdapter adapter = new BytesEncoderAdapter(zipkin2.codec.SpanBytesEncoder.PROTO3); + assertThat(adapter.encoding()).isEqualTo(Encoding.PROTO3); + assertThat(adapter.encode(testSpan)).isEqualTo(SpanBytesEncoder.PROTO3.encode(testSpan)); + assertThat(adapter.sizeInBytes(testSpan)) + .isEqualTo(SpanBytesEncoder.PROTO3.sizeInBytes(testSpan)); + assertThat(adapter).hasToString(SpanBytesEncoder.PROTO3.toString()); + } + + @Test + @SuppressWarnings("deprecation") // we have to use the deprecated thrift encoding to test it + void testThrift() { + BytesEncoderAdapter adapter = new BytesEncoderAdapter(zipkin2.codec.SpanBytesEncoder.THRIFT); + assertThat(adapter.encoding()).isEqualTo(Encoding.THRIFT); + assertThat(adapter.encode(testSpan)).isEqualTo(SpanBytesEncoder.THRIFT.encode(testSpan)); + assertThat(adapter.sizeInBytes(testSpan)) + .isEqualTo(SpanBytesEncoder.THRIFT.sizeInBytes(testSpan)); + assertThat(adapter).hasToString(SpanBytesEncoder.THRIFT.toString()); + } + + @Test + void testJsonV1() { + BytesEncoderAdapter adapter = new BytesEncoderAdapter(zipkin2.codec.SpanBytesEncoder.JSON_V1); + assertThat(adapter.encoding()).isEqualTo(Encoding.JSON); + assertThat(adapter.encode(testSpan)).isEqualTo(SpanBytesEncoder.JSON_V1.encode(testSpan)); + assertThat(adapter.sizeInBytes(testSpan)) + .isEqualTo(SpanBytesEncoder.JSON_V1.sizeInBytes(testSpan)); + assertThat(adapter).hasToString(SpanBytesEncoder.JSON_V1.toString()); + } +} diff --git a/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterEndToEndHttpTest.java b/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterEndToEndHttpTest.java index d12a6b060e3..4ad9bdfb106 100644 --- a/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterEndToEndHttpTest.java +++ b/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterEndToEndHttpTest.java @@ -44,9 +44,10 @@ import org.testcontainers.junit.jupiter.Testcontainers; import zipkin2.Endpoint; import zipkin2.Span; -import zipkin2.codec.Encoding; import zipkin2.codec.SpanBytesDecoder; -import zipkin2.codec.SpanBytesEncoder; +import zipkin2.reporter.BytesMessageSender; +import zipkin2.reporter.Encoding; +import zipkin2.reporter.SpanBytesEncoder; import zipkin2.reporter.okhttp3.OkHttpSender; @Testcontainers(disabledWithoutDocker = true) @@ -81,8 +82,8 @@ class ZipkinSpanExporterEndToEndHttpTest { SEEN_ATTRIBUTES.toBuilder().put(AttributeKey.booleanKey("success"), false).build(); @Container - public static GenericContainer zipkinContainer = - new GenericContainer<>("ghcr.io/openzipkin/zipkin:2.23") + public static final GenericContainer zipkinContainer = + new GenericContainer<>("ghcr.io/openzipkin/zipkin:2.27") .withExposedPorts(ZIPKIN_API_PORT) .waitingFor(Wait.forHttp("/health").forPort(ZIPKIN_API_PORT)); 
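Note: the builder changes above move ZipkinSpanExporter from zipkin2.Sender / zipkin2.codec.BytesEncoder to the zipkin2.reporter equivalents. A minimal usage sketch of the new setters, illustrative only and not part of the patch (the example class name and the endpoint URL are assumptions; the builder calls mirror the tests above):

    import io.opentelemetry.exporter.zipkin.ZipkinSpanExporter;
    import zipkin2.reporter.BytesMessageSender;
    import zipkin2.reporter.Encoding;
    import zipkin2.reporter.SpanBytesEncoder;
    import zipkin2.reporter.okhttp3.OkHttpSender;

    class ZipkinExporterExample {
      static ZipkinSpanExporter create() {
        // Assumed collector URL; substitute the real Zipkin endpoint.
        BytesMessageSender sender =
            OkHttpSender.newBuilder()
                .endpoint("http://localhost:9411/api/v2/spans")
                .encoding(Encoding.PROTO3)
                .build();
        return ZipkinSpanExporter.builder()
            .setSender(sender)
            // zipkin2.reporter encoder, chosen to match the sender's encoding
            .setEncoder(SpanBytesEncoder.PROTO3)
            .build();
      }
    }

The deprecated zipkin2.codec overloads remain usable; BytesEncoderAdapter bridges them to the new zipkin2.reporter.BytesEncoder type.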
@@ -175,8 +176,10 @@ void testExportFailedAsWrongEncoderUsed() { private static ZipkinSpanExporter buildZipkinExporter( String endpoint, Encoding encoding, SpanBytesEncoder encoder, MeterProvider meterProvider) { + BytesMessageSender sender = + OkHttpSender.newBuilder().endpoint(endpoint).encoding(encoding).build(); return ZipkinSpanExporter.builder() - .setSender(OkHttpSender.newBuilder().endpoint(endpoint).encoding(encoding).build()) + .setSender(sender) .setEncoder(encoder) .setMeterProvider(meterProvider) .setLocalIpAddressSupplier(() -> localIp) diff --git a/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterTest.java b/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterTest.java index 263edadad53..b89c273bbbb 100644 --- a/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterTest.java +++ b/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/ZipkinSpanExporterTest.java @@ -9,13 +9,15 @@ import static io.opentelemetry.exporter.zipkin.ZipkinTestUtil.zipkinSpanBuilder; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import io.github.netmikey.logunit.api.LogCapturer; +import io.opentelemetry.api.internal.InstrumentationUtil; import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.context.Context; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.testing.trace.TestSpanData; @@ -23,24 +25,25 @@ import java.net.InetAddress; import java.time.Duration; import java.util.Collections; +import java.util.List; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.RegisterExtension; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import zipkin2.Call; -import zipkin2.Callback; import zipkin2.Span; -import zipkin2.codec.SpanBytesEncoder; -import zipkin2.reporter.Sender; +import zipkin2.reporter.BytesEncoder; +import zipkin2.reporter.BytesMessageSender; +import zipkin2.reporter.Encoding; +import zipkin2.reporter.SpanBytesEncoder; @ExtendWith(MockitoExtension.class) class ZipkinSpanExporterTest { - @Mock private Sender mockSender; + @Mock private BytesMessageSender mockSender; @Mock private SpanBytesEncoder mockEncoder; - @Mock private Call mockZipkinCall; @Mock private OtelToZipkinSpanTransformer mockTransformer; @Mock private InetAddress localIp; @@ -48,7 +51,7 @@ class ZipkinSpanExporterTest { LogCapturer logs = LogCapturer.create().captureForType(ZipkinSpanExporter.class); @Test - void testExport() { + void testExport() throws IOException { TestSpanData testSpanData = spanBuilder().build(); ZipkinSpanExporter zipkinSpanExporter = @@ -66,25 +69,18 @@ void testExport() { .build(); when(mockTransformer.generateSpan(testSpanData)).thenReturn(zipkinSpan); when(mockEncoder.encode(zipkinSpan)).thenReturn(someBytes); - when(mockSender.sendSpans(Collections.singletonList(someBytes))).thenReturn(mockZipkinCall); - doAnswer( - invocation -> { - Callback 
callback = invocation.getArgument(0); - callback.onSuccess(null); - return null; - }) - .when(mockZipkinCall) - .enqueue(any()); CompletableResultCode resultCode = zipkinSpanExporter.export(Collections.singleton(testSpanData)); assertThat(resultCode.isSuccess()).isTrue(); + + verify(mockSender).send(Collections.singletonList(someBytes)); } @Test @SuppressLogger(ZipkinSpanExporter.class) - void testExport_failed() { + void testExport_failed() throws IOException { TestSpanData testSpanData = spanBuilder().build(); ZipkinSpanExporter zipkinSpanExporter = @@ -102,20 +98,14 @@ void testExport_failed() { .build(); when(mockTransformer.generateSpan(testSpanData)).thenReturn(zipkinSpan); when(mockEncoder.encode(zipkinSpan)).thenReturn(someBytes); - when(mockSender.sendSpans(Collections.singletonList(someBytes))).thenReturn(mockZipkinCall); - doAnswer( - invocation -> { - Callback callback = invocation.getArgument(0); - callback.onError(new IOException()); - return null; - }) - .when(mockZipkinCall) - .enqueue(any()); + doThrow(new IOException()).when(mockSender).send(Collections.singletonList(someBytes)); CompletableResultCode resultCode = zipkinSpanExporter.export(Collections.singleton(testSpanData)); assertThat(resultCode.isSuccess()).isFalse(); + + verify(mockSender).send(Collections.singletonList(someBytes)); } @Test @@ -144,7 +134,8 @@ void testShutdown() throws IOException { } @Test - @SuppressWarnings("PreferJavaTimeOverload") + @SuppressWarnings({"PreferJavaTimeOverload", "deprecation"}) + // we have to use the deprecated setEncoder overload to test it void invalidConfig() { assertThatThrownBy(() -> ZipkinSpanExporter.builder().setReadTimeout(-1, TimeUnit.MILLISECONDS)) .isInstanceOf(IllegalArgumentException.class) @@ -170,16 +161,40 @@ void invalidConfig() { .isInstanceOf(NullPointerException.class) .hasMessage("sender"); - assertThatThrownBy(() -> ZipkinSpanExporter.builder().setEncoder(null)) + assertThatThrownBy( + () -> ZipkinSpanExporter.builder().setEncoder((zipkin2.codec.BytesEncoder) null)) + .isInstanceOf(NullPointerException.class) + .hasMessage("encoder"); + + assertThatThrownBy(() -> ZipkinSpanExporter.builder().setEncoder((BytesEncoder) null)) .isInstanceOf(NullPointerException.class) .hasMessage("encoder"); } + @Test + void encoderProtobuf() { + @SuppressWarnings("deprecation") // we have to use the deprecated setEncoderto test it + ZipkinSpanExporter exporter = + ZipkinSpanExporter.builder().setEncoder(zipkin2.codec.SpanBytesEncoder.PROTO3).build(); + try { + assertThat(exporter).extracting("encoder.encoding").isEqualTo(Encoding.PROTO3); + } finally { + exporter.shutdown(); + } + + exporter = ZipkinSpanExporter.builder().setEncoder(SpanBytesEncoder.PROTO3).build(); + try { + assertThat(exporter).extracting("encoder").isEqualTo(SpanBytesEncoder.PROTO3); + } finally { + exporter.shutdown(); + } + } + @Test void compressionDefault() { ZipkinSpanExporter exporter = ZipkinSpanExporter.builder().build(); try { - assertThat(exporter).extracting("sender.compressionEnabled").isEqualTo(true); + assertThat(exporter).extracting("sender.delegate.compressionEnabled").isEqualTo(true); } finally { exporter.shutdown(); } @@ -189,7 +204,7 @@ void compressionDefault() { void compressionNone() { ZipkinSpanExporter exporter = ZipkinSpanExporter.builder().setCompression("none").build(); try { - assertThat(exporter).extracting("sender.compressionEnabled").isEqualTo(false); + assertThat(exporter).extracting("sender.delegate.compressionEnabled").isEqualTo(false); } finally { exporter.shutdown(); } 
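Editor's note — as the encoderProtobuf test above exercises, the builder now accepts both the deprecated zipkin2.codec encoders and the zipkin2.reporter ones. A short caller-side sketch of the two overloads (class and method names here are illustrative only):

```java
import io.opentelemetry.exporter.zipkin.ZipkinSpanExporter;

class EncoderOverloadSketch {
  @SuppressWarnings("deprecation")
  static void build() {
    // Deprecated overload: a zipkin2.codec encoder, adapted internally to the reporter API.
    ZipkinSpanExporter legacy =
        ZipkinSpanExporter.builder().setEncoder(zipkin2.codec.SpanBytesEncoder.PROTO3).build();
    legacy.shutdown();

    // Newer overload: a zipkin2.reporter encoder, used as-is.
    ZipkinSpanExporter current =
        ZipkinSpanExporter.builder().setEncoder(zipkin2.reporter.SpanBytesEncoder.PROTO3).build();
    current.shutdown();
  }
}
```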
@@ -199,7 +214,7 @@ void compressionNone() { void compressionGzip() { ZipkinSpanExporter exporter = ZipkinSpanExporter.builder().setCompression("gzip").build(); try { - assertThat(exporter).extracting("sender.compressionEnabled").isEqualTo(true); + assertThat(exporter).extracting("sender.delegate.compressionEnabled").isEqualTo(true); } finally { exporter.shutdown(); } @@ -210,7 +225,22 @@ void compressionEnabledAndDisabled() { ZipkinSpanExporter exporter = ZipkinSpanExporter.builder().setCompression("gzip").setCompression("none").build(); try { - assertThat(exporter).extracting("sender.compressionEnabled").isEqualTo(false); + assertThat(exporter).extracting("sender.delegate.compressionEnabled").isEqualTo(false); + } finally { + exporter.shutdown(); + } + } + + @Test + @SuppressWarnings("PreferJavaTimeOverload") + void readTimeout_Zero() { + ZipkinSpanExporter exporter = + ZipkinSpanExporter.builder().setReadTimeout(0, TimeUnit.SECONDS).build(); + + try { + assertThat(exporter) + .extracting("sender.delegate.client.readTimeoutMillis") + .isEqualTo(Integer.MAX_VALUE); } finally { exporter.shutdown(); } @@ -234,4 +264,57 @@ void stringRepresentation() { "ZipkinSpanExporter{endpoint=http://zipkin:9411/api/v2/spans, compressionEnabled=false, readTimeoutMillis=15000}"); } } + + @Test + void suppressInstrumentation() { + TestSpanData testSpanData = spanBuilder().build(); + + SuppressCatchingSender suppressCatchingSender = new SuppressCatchingSender(Encoding.JSON); + ZipkinSpanExporter zipkinSpanExporter = + new ZipkinSpanExporter( + new ZipkinSpanExporterBuilder(), + mockEncoder, + suppressCatchingSender, + MeterProvider::noop, + mockTransformer); + + byte[] someBytes = new byte[0]; + Span zipkinSpan = + zipkinSpanBuilder(Span.Kind.SERVER, localIp) + .putTag(OtelToZipkinSpanTransformer.OTEL_STATUS_CODE, "OK") + .build(); + when(mockTransformer.generateSpan(testSpanData)).thenReturn(zipkinSpan); + when(mockEncoder.encode(zipkinSpan)).thenReturn(someBytes); + + zipkinSpanExporter.export(Collections.singleton(testSpanData)); + + // Instrumentation should be suppressed on send, to avoid incidental spans related to span + // export. 
+ assertTrue(suppressCatchingSender.sent.get()); + assertTrue(suppressCatchingSender.suppressed.get()); + } + + static class SuppressCatchingSender extends BytesMessageSender.Base { + + final AtomicBoolean sent = new AtomicBoolean(); + final AtomicBoolean suppressed = new AtomicBoolean(); + + protected SuppressCatchingSender(Encoding encoding) { + super(encoding); + } + + @Override + public int messageMaxBytes() { + return 1024; + } + + @Override + public void send(List list) throws IOException { + sent.set(true); + suppressed.set(InstrumentationUtil.shouldSuppressInstrumentation(Context.current())); + } + + @Override + public void close() throws IOException {} + } } diff --git a/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/internal/ZipkinSpanExporterProviderTest.java b/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/internal/ZipkinSpanExporterProviderTest.java index a0ad9a0a787..289fa1f7709 100644 --- a/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/internal/ZipkinSpanExporterProviderTest.java +++ b/exporters/zipkin/src/test/java/io/opentelemetry/exporter/zipkin/internal/ZipkinSpanExporterProviderTest.java @@ -13,7 +13,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -import okhttp3.HttpUrl; import org.junit.jupiter.api.Test; class ZipkinSpanExporterProviderTest { @@ -32,13 +31,14 @@ void createExporter_Default() { assertThat(spanExporter).isInstanceOf(ZipkinSpanExporter.class); assertThat(spanExporter) .extracting("sender") + .extracting("delegate") .extracting("client") .extracting("readTimeoutMillis") .isEqualTo(10_000); assertThat(spanExporter) .extracting("sender") .extracting("endpoint") - .isEqualTo(HttpUrl.get("http://localhost:9411/api/v2/spans")); + .isEqualTo("http://localhost:9411/api/v2/spans"); } } @@ -53,13 +53,14 @@ void createExporter_WithConfiguration() { assertThat(spanExporter).isInstanceOf(ZipkinSpanExporter.class); assertThat(spanExporter) .extracting("sender") + .extracting("delegate") .extracting("client") .extracting("readTimeoutMillis") .isEqualTo(1000); assertThat(spanExporter) .extracting("sender") .extracting("endpoint") - .isEqualTo(HttpUrl.get("http://localhost:8080/spans")); + .isEqualTo("http://localhost:8080/spans"); } } } diff --git a/extensions/incubator/build.gradle.kts b/extensions/incubator/build.gradle.kts deleted file mode 100644 index 6acaf6cddb2..00000000000 --- a/extensions/incubator/build.gradle.kts +++ /dev/null @@ -1,16 +0,0 @@ -plugins { - id("otel.java-conventions") - id("otel.publish-conventions") - - id("otel.jmh-conventions") - id("otel.animalsniffer-conventions") -} - -description = "OpenTelemetry API Incubator" -otelJava.moduleName.set("io.opentelemetry.extension.incubator") - -dependencies { - api(project(":api:all")) - - testImplementation(project(":sdk:testing")) -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleCounterAdviceConfigurer.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleCounterAdviceConfigurer.java deleted file mode 100644 index 51c3a6c3447..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleCounterAdviceConfigurer.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.common.AttributeKey; -import 
io.opentelemetry.api.metrics.DoubleCounter; -import java.util.List; - -/** Configure advice for implementation of {@link DoubleCounter}. */ -public interface DoubleCounterAdviceConfigurer { - - /** Specify the recommended set of attribute keys to be used for this counter. */ - DoubleCounterAdviceConfigurer setAttributes(List> attributes); -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleGaugeAdviceConfigurer.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleGaugeAdviceConfigurer.java deleted file mode 100644 index 3c99c1d6df5..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleGaugeAdviceConfigurer.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.common.AttributeKey; -import java.util.List; - -/** Configure advice for implementation of {@code DoubleGauge}. */ -public interface DoubleGaugeAdviceConfigurer { - - /** Specify the recommended set of attribute keys to be used for this gauge. */ - DoubleGaugeAdviceConfigurer setAttributes(List> attributes); -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleHistogramAdviceConfigurer.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleHistogramAdviceConfigurer.java deleted file mode 100644 index 404a3db9d16..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleHistogramAdviceConfigurer.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.metrics.DoubleHistogram; -import java.util.List; - -/** Configure advice for implementations of {@link DoubleHistogram}. */ -public interface DoubleHistogramAdviceConfigurer { - - /** Specify recommended set of explicit bucket boundaries for this histogram. */ - DoubleHistogramAdviceConfigurer setExplicitBucketBoundaries(List bucketBoundaries); - - /** Specify the recommended set of attribute keys to be used for this histogram. */ - DoubleHistogramAdviceConfigurer setAttributes(List> attributes); -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleUpDownCounterAdviceConfigurer.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleUpDownCounterAdviceConfigurer.java deleted file mode 100644 index 06acffb4e3a..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/DoubleUpDownCounterAdviceConfigurer.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.metrics.DoubleUpDownCounter; -import java.util.List; - -/** Configure advice for implementation of {@link DoubleUpDownCounter}. */ -public interface DoubleUpDownCounterAdviceConfigurer { - - /** Specify the recommended set of attribute keys to be used for this up down counter. 
*/ - DoubleUpDownCounterAdviceConfigurer setAttributes(List> attributes); -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleCounterBuilder.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleCounterBuilder.java deleted file mode 100644 index a6bcc97a8c2..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleCounterBuilder.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.metrics.DoubleCounterBuilder; -import java.util.function.Consumer; - -/** Extended {@link DoubleCounterBuilder} with experimental APIs. */ -public interface ExtendedDoubleCounterBuilder extends DoubleCounterBuilder { - - /** Specify advice for counter implementations. */ - default DoubleCounterBuilder setAdvice(Consumer adviceConsumer) { - return this; - } -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleGaugeBuilder.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleGaugeBuilder.java deleted file mode 100644 index 208dba0c32f..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleGaugeBuilder.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.metrics.DoubleGaugeBuilder; -import java.util.function.Consumer; - -/** Extended {@link DoubleGaugeBuilder} with experimental APIs. */ -public interface ExtendedDoubleGaugeBuilder extends DoubleGaugeBuilder { - - /** - * Builds and returns a DoubleGauge instrument with the configuration. - * - *
<p>
NOTE: This produces a synchronous gauge which records gauge values as they occur. Most users - * will want to instead register an {@link #buildWithCallback(Consumer)} to asynchronously observe - * the value of the gauge when metrics are collected. - * - * @return The DoubleGauge instrument. - */ - DoubleGauge build(); - - /** Specify advice for gauge implementations. */ - default DoubleGaugeBuilder setAdvice(Consumer adviceConsumer) { - return this; - } -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleHistogramBuilder.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleHistogramBuilder.java deleted file mode 100644 index c48cf9420de..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleHistogramBuilder.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.metrics.DoubleHistogramBuilder; -import java.util.function.Consumer; - -/** Extended {@link DoubleHistogramBuilder} with experimental APIs. */ -public interface ExtendedDoubleHistogramBuilder extends DoubleHistogramBuilder { - - /** Specify advice for histogram implementations. */ - default DoubleHistogramBuilder setAdvice( - Consumer adviceConsumer) { - return this; - } -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleUpDownCounterBuilder.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleUpDownCounterBuilder.java deleted file mode 100644 index b50e0e155db..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedDoubleUpDownCounterBuilder.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.metrics.DoubleUpDownCounterBuilder; -import java.util.function.Consumer; - -/** Extended {@link DoubleUpDownCounterBuilder} with experimental APIs. */ -public interface ExtendedDoubleUpDownCounterBuilder extends DoubleUpDownCounterBuilder { - - /** Specify advice for up down counter implementations. */ - default DoubleUpDownCounterBuilder setAdvice( - Consumer adviceConsumer) { - return this; - } -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongCounterBuilder.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongCounterBuilder.java deleted file mode 100644 index 9171156d8ae..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongCounterBuilder.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.metrics.LongCounterBuilder; -import java.util.function.Consumer; - -/** Extended {@link LongCounterBuilder} with experimental APIs. */ -public interface ExtendedLongCounterBuilder extends LongCounterBuilder { - - /** Specify advice for counter implementations. 
*/ - default LongCounterBuilder setAdvice(Consumer adviceConsumer) { - return this; - } -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongGaugeBuilder.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongGaugeBuilder.java deleted file mode 100644 index 1c42952834f..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongGaugeBuilder.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.metrics.LongGaugeBuilder; -import java.util.function.Consumer; - -/** Extended {@link LongGaugeBuilder} with experimental APIs. */ -public interface ExtendedLongGaugeBuilder extends LongGaugeBuilder { - - /** - * Builds and returns a LongGauge instrument with the configuration. - * - *
<p>
NOTE: This produces a synchronous gauge which records gauge values as they occur. Most users - * will want to instead register an {@link #buildWithCallback(Consumer)} to asynchronously observe - * the value of the gauge when metrics are collected. - * - * @return The LongGauge instrument. - */ - LongGauge build(); - - /** Specify advice for gauge implementations. */ - default LongGaugeBuilder setAdvice(Consumer adviceConsumer) { - return this; - } -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongHistogramBuilder.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongHistogramBuilder.java deleted file mode 100644 index 211c86fe7cd..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongHistogramBuilder.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.metrics.LongHistogramBuilder; -import java.util.function.Consumer; - -/** Extended {@link LongHistogramBuilder} with experimental APIs. */ -public interface ExtendedLongHistogramBuilder extends LongHistogramBuilder { - - /** Specify advice for histogram implementations. */ - default LongHistogramBuilder setAdvice(Consumer adviceConsumer) { - return this; - } -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongUpDownCounterBuilder.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongUpDownCounterBuilder.java deleted file mode 100644 index 75cb286376e..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/ExtendedLongUpDownCounterBuilder.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; -import java.util.function.Consumer; - -/** Extended {@link LongUpDownCounterBuilder} with experimental APIs. */ -public interface ExtendedLongUpDownCounterBuilder extends LongUpDownCounterBuilder { - - /** Specify advice for up down counter implementations. */ - default LongUpDownCounterBuilder setAdvice( - Consumer adviceConsumer) { - return this; - } -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongCounterAdviceConfigurer.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongCounterAdviceConfigurer.java deleted file mode 100644 index 3a0e9bbe70d..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongCounterAdviceConfigurer.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.metrics.LongCounter; -import java.util.List; - -/** Configure advice for implementation of {@link LongCounter}. */ -public interface LongCounterAdviceConfigurer { - - /** Specify the recommended set of attribute keys to be used for this counter. 
*/ - LongCounterAdviceConfigurer setAttributes(List> attributes); -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongGaugeAdviceConfigurer.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongGaugeAdviceConfigurer.java deleted file mode 100644 index c424a878971..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongGaugeAdviceConfigurer.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.common.AttributeKey; -import java.util.List; - -/** Configure advice for implementation of {@code LongGauge}. */ -public interface LongGaugeAdviceConfigurer { - - /** Specify the recommended set of attribute keys to be used for this gauge. */ - LongGaugeAdviceConfigurer setAttributes(List> attributes); -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongHistogramAdviceConfigurer.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongHistogramAdviceConfigurer.java deleted file mode 100644 index 0c5fc91ad27..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongHistogramAdviceConfigurer.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.metrics.LongHistogram; -import java.util.List; - -/** Configure advice for implementations of {@link LongHistogram}. */ -public interface LongHistogramAdviceConfigurer { - - /** Specify recommended set of explicit bucket boundaries for this histogram. */ - LongHistogramAdviceConfigurer setExplicitBucketBoundaries(List bucketBoundaries); - - /** Specify the recommended set of attribute keys to be used for this histogram. */ - LongHistogramAdviceConfigurer setAttributes(List> attributes); -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongUpDownCounterAdviceConfigurer.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongUpDownCounterAdviceConfigurer.java deleted file mode 100644 index 5090a824997..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/metrics/LongUpDownCounterAdviceConfigurer.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.metrics; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.metrics.LongUpDownCounter; -import java.util.List; - -/** Configure advice for implementation of {@link LongUpDownCounter}. */ -public interface LongUpDownCounterAdviceConfigurer { - - /** Specify the recommended set of attribute keys to be used for this up down counter. 
*/ - LongUpDownCounterAdviceConfigurer setAttributes(List> attributes); -} diff --git a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/trace/ExtendedTracer.java b/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/trace/ExtendedTracer.java deleted file mode 100644 index 1be6bc899fa..00000000000 --- a/extensions/incubator/src/main/java/io/opentelemetry/extension/incubator/trace/ExtendedTracer.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.trace; - -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.api.trace.SpanBuilder; -import io.opentelemetry.api.trace.Tracer; -import io.opentelemetry.context.Scope; -import java.util.concurrent.Callable; - -/** Provides easy mechanisms for wrapping standard Java constructs with an OpenTelemetry Span. */ -public final class ExtendedTracer implements Tracer { - - private final Tracer delegate; - - /** Create a new {@link ExtendedTracer} that wraps the provided Tracer. */ - public static ExtendedTracer create(Tracer delegate) { - return new ExtendedTracer(delegate); - } - - private ExtendedTracer(Tracer delegate) { - this.delegate = delegate; - } - - /** Run the provided {@link Runnable} and wrap with a {@link Span} with the provided name. */ - public void run(String spanName, Runnable runnable) { - Span span = delegate.spanBuilder(spanName).startSpan(); - try (Scope scope = span.makeCurrent()) { - runnable.run(); - } catch (Throwable e) { - span.recordException(e); - throw e; - } finally { - span.end(); - } - } - - /** Call the provided {@link Callable} and wrap with a {@link Span} with the provided name. */ - public T call(String spanName, Callable callable) throws Exception { - Span span = delegate.spanBuilder(spanName).startSpan(); - try (Scope scope = span.makeCurrent()) { - return callable.call(); - } catch (Throwable e) { - span.recordException(e); - throw e; - } finally { - span.end(); - } - } - - @Override - public SpanBuilder spanBuilder(String spanName) { - return delegate.spanBuilder(spanName); - } -} diff --git a/extensions/incubator/src/test/java/io/opentelemetry/extension/incubator/trace/ExtendedTracerTest.java b/extensions/incubator/src/test/java/io/opentelemetry/extension/incubator/trace/ExtendedTracerTest.java deleted file mode 100644 index 537fb8aa447..00000000000 --- a/extensions/incubator/src/test/java/io/opentelemetry/extension/incubator/trace/ExtendedTracerTest.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.extension.incubator.trace; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.api.trace.Tracer; -import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -class ExtendedTracerTest { - @RegisterExtension - static final OpenTelemetryExtension otelTesting = OpenTelemetryExtension.create(); - - private final Tracer tracer = otelTesting.getOpenTelemetry().getTracer("test"); - - @Test - void runRunnable() { - ExtendedTracer.create(tracer).run("testSpan", () -> Span.current().setAttribute("one", 1)); - - 
otelTesting - .assertTraces() - .hasTracesSatisfyingExactly( - traceAssert -> - traceAssert.hasSpansSatisfyingExactly( - spanDataAssert -> - spanDataAssert - .hasName("testSpan") - .hasAttributes(Attributes.of(AttributeKey.longKey("one"), 1L)))); - } - - @Test - void runRunnable_throws() { - assertThatThrownBy( - () -> - ExtendedTracer.create(tracer) - .run( - "throwingRunnable", - () -> { - Span.current().setAttribute("one", 1); - throw new RuntimeException("failed"); - })) - .isInstanceOf(RuntimeException.class); - - otelTesting - .assertTraces() - .hasTracesSatisfyingExactly( - traceAssert -> - traceAssert.hasSpansSatisfyingExactly( - span -> - span.hasName("throwingRunnable") - .hasAttributes(Attributes.of(AttributeKey.longKey("one"), 1L)) - .hasEventsSatisfying( - (events) -> - assertThat(events) - .singleElement() - .satisfies( - eventData -> - assertThat(eventData.getName()) - .isEqualTo("exception"))))); - } - - @Test - void callCallable() throws Exception { - assertThat( - ExtendedTracer.create(tracer) - .call( - "spanCallable", - () -> { - Span.current().setAttribute("one", 1); - return "hello"; - })) - .isEqualTo("hello"); - - otelTesting - .assertTraces() - .hasTracesSatisfyingExactly( - traceAssert -> - traceAssert.hasSpansSatisfyingExactly( - spanDataAssert -> - spanDataAssert - .hasName("spanCallable") - .hasAttributes(Attributes.of(AttributeKey.longKey("one"), 1L)))); - } - - @Test - void callCallable_throws() { - assertThatThrownBy( - () -> - ExtendedTracer.create(tracer) - .call( - "throwingCallable", - () -> { - Span.current().setAttribute("one", 1); - throw new RuntimeException("failed"); - })) - .isInstanceOf(RuntimeException.class); - - otelTesting - .assertTraces() - .hasTracesSatisfyingExactly( - traceAssert -> - traceAssert.hasSpansSatisfyingExactly( - spanDataAssert -> - spanDataAssert - .hasName("throwingCallable") - .hasAttributes(Attributes.of(AttributeKey.longKey("one"), 1L)))); - } -} diff --git a/extensions/kotlin/README.md b/extensions/kotlin/README.md new file mode 100644 index 00000000000..3637e9e6e26 --- /dev/null +++ b/extensions/kotlin/README.md @@ -0,0 +1,12 @@ +# OpenTelemetry Kotlin Extension + +Kotlin [Extensions](src/main/kotlin/io/opentelemetry/extension/kotlin/ContextExtensions.kt) to propagate +OpenTelemetry context into coroutines. + +For example, you could do the following with coroutines + +```kotlin +launch(Context.current().asContextElement()) { +// trace ids propagated here +} +``` diff --git a/extensions/kotlin/build.gradle.kts b/extensions/kotlin/build.gradle.kts index 14a43ef1499..7409cb49f29 100644 --- a/extensions/kotlin/build.gradle.kts +++ b/extensions/kotlin/build.gradle.kts @@ -48,6 +48,7 @@ tasks { withType(KotlinCompile::class) { kotlinOptions { jvmTarget = "1.8" + languageVersion = "1.6" } } diff --git a/extensions/kotlin/src/main/kotlin/io/opentelemetry/extension/kotlin/ContextExtensions.kt b/extensions/kotlin/src/main/kotlin/io/opentelemetry/extension/kotlin/ContextExtensions.kt index 78f5a43e3f7..21e44222cac 100644 --- a/extensions/kotlin/src/main/kotlin/io/opentelemetry/extension/kotlin/ContextExtensions.kt +++ b/extensions/kotlin/src/main/kotlin/io/opentelemetry/extension/kotlin/ContextExtensions.kt @@ -13,17 +13,13 @@ import kotlin.coroutines.CoroutineContext * Returns a [CoroutineContext] which will make this [Context] current when resuming a coroutine * and restores the previous [Context] on suspension. 
*/ -fun Context.asContextElement(): CoroutineContext { - return KotlinContextElement(this) -} +fun Context.asContextElement(): CoroutineContext = KotlinContextElement(this) /** * Returns a [CoroutineContext] which will make this [ImplicitContextKeyed] current when resuming a * coroutine and restores the previous [Context] on suspension. */ -fun ImplicitContextKeyed.asContextElement(): CoroutineContext { - return KotlinContextElement(Context.current().with(this)) -} +fun ImplicitContextKeyed.asContextElement(): CoroutineContext = KotlinContextElement(Context.current().with(this)) /** * Returns the [Context] in this [CoroutineContext] if present, or the root otherwise. diff --git a/extensions/trace-propagators/build.gradle.kts b/extensions/trace-propagators/build.gradle.kts index 0683986978d..8ede73f0b0e 100644 --- a/extensions/trace-propagators/build.gradle.kts +++ b/extensions/trace-propagators/build.gradle.kts @@ -12,6 +12,7 @@ otelJava.moduleName.set("io.opentelemetry.extension.trace.propagation") dependencies { api(project(":api:all")) + compileOnly(project(":api:incubator")) compileOnly(project(":sdk-extensions:autoconfigure-spi")) testImplementation("io.jaegertracing:jaeger-client") diff --git a/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/Common.java b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/Common.java index aabbc190e03..9d04262e7d1 100644 --- a/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/Common.java +++ b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/Common.java @@ -29,6 +29,8 @@ final class Common { static final int MAX_TRACE_ID_LENGTH = TraceId.getLength(); static final int MIN_TRACE_ID_LENGTH = MAX_TRACE_ID_LENGTH / 2; + static final int MAX_SPAN_ID_LENGTH = SpanId.getLength(); + private Common() {} static SpanContext buildSpanContext( @@ -44,7 +46,7 @@ static SpanContext buildSpanContext( return SpanContext.createFromRemoteParent( StringUtils.padLeft(traceId, MAX_TRACE_ID_LENGTH), - spanId, + StringUtils.padLeft(spanId, MAX_SPAN_ID_LENGTH), traceFlags, TraceState.getDefault()); } catch (RuntimeException e) { diff --git a/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/OtTracePropagator.java b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/OtTracePropagator.java index 3bd0ed771fe..3106382abd4 100644 --- a/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/OtTracePropagator.java +++ b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/OtTracePropagator.java @@ -5,6 +5,7 @@ package io.opentelemetry.extension.trace.propagation; +import static io.opentelemetry.extension.trace.propagation.Common.MAX_SPAN_ID_LENGTH; import static io.opentelemetry.extension.trace.propagation.Common.MAX_TRACE_ID_LENGTH; import io.opentelemetry.api.baggage.Baggage; @@ -12,6 +13,7 @@ import io.opentelemetry.api.internal.StringUtils; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanId; import io.opentelemetry.api.trace.TraceId; import io.opentelemetry.context.Context; import io.opentelemetry.context.propagation.TextMapGetter; @@ -20,6 +22,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Locale; import javax.annotation.Nullable; import 
javax.annotation.concurrent.Immutable; @@ -95,7 +98,13 @@ public Context extract(Context context, @Nullable C carrier, TextMapGetter Context extract(Context context, @Nullable C carrier, TextMapGetterThis class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class B3ComponentProvider implements ComponentProvider { + + @Override + public Class getType() { + return TextMapPropagator.class; + } + + @Override + public String getName() { + return "b3"; + } + + @Override + public TextMapPropagator create(DeclarativeConfigProperties config) { + return B3Propagator.injectingSingleHeader(); + } +} diff --git a/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/internal/B3MultiComponentProvider.java b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/internal/B3MultiComponentProvider.java new file mode 100644 index 00000000000..0fb223d81a7 --- /dev/null +++ b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/internal/B3MultiComponentProvider.java @@ -0,0 +1,36 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.extension.trace.propagation.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.extension.trace.propagation.B3Propagator; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; + +/** + * Declarative configuration SPI implementation for {@link B3Propagator} which allows enables the + * {@link B3Propagator#injectingMultiHeaders()}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class B3MultiComponentProvider implements ComponentProvider { + + @Override + public Class getType() { + return TextMapPropagator.class; + } + + @Override + public String getName() { + return "b3multi"; + } + + @Override + public TextMapPropagator create(DeclarativeConfigProperties config) { + return B3Propagator.injectingMultiHeaders(); + } +} diff --git a/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/internal/JaegerComponentProvider.java b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/internal/JaegerComponentProvider.java new file mode 100644 index 00000000000..1326dd4cc83 --- /dev/null +++ b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/internal/JaegerComponentProvider.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.extension.trace.propagation.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.extension.trace.propagation.JaegerPropagator; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; + +/** + * Declarative configuration SPI implementation for {@link JaegerPropagator}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class JaegerComponentProvider implements ComponentProvider { + + @Override + public Class getType() { + return TextMapPropagator.class; + } + + @Override + public String getName() { + return "jaeger"; + } + + @Override + public TextMapPropagator create(DeclarativeConfigProperties config) { + return JaegerPropagator.getInstance(); + } +} diff --git a/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/internal/OtTraceComponentProvider.java b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/internal/OtTraceComponentProvider.java new file mode 100644 index 00000000000..261eb5f8585 --- /dev/null +++ b/extensions/trace-propagators/src/main/java/io/opentelemetry/extension/trace/propagation/internal/OtTraceComponentProvider.java @@ -0,0 +1,36 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.extension.trace.propagation.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.extension.trace.propagation.B3Propagator; +import io.opentelemetry.extension.trace.propagation.OtTracePropagator; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; + +/** + * Declarative configuration SPI implementation for {@link B3Propagator}. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class OtTraceComponentProvider implements ComponentProvider { + + @Override + public Class getType() { + return TextMapPropagator.class; + } + + @Override + public String getName() { + return "ottrace"; + } + + @Override + public TextMapPropagator create(DeclarativeConfigProperties config) { + return OtTracePropagator.getInstance(); + } +} diff --git a/extensions/trace-propagators/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/extensions/trace-propagators/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider new file mode 100644 index 00000000000..d4194f2b014 --- /dev/null +++ b/extensions/trace-propagators/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider @@ -0,0 +1,4 @@ +io.opentelemetry.extension.trace.propagation.internal.B3ComponentProvider +io.opentelemetry.extension.trace.propagation.internal.B3MultiComponentProvider +io.opentelemetry.extension.trace.propagation.internal.JaegerComponentProvider +io.opentelemetry.extension.trace.propagation.internal.OtTraceComponentProvider diff --git a/extensions/trace-propagators/src/test/java/io/opentelemetry/extension/trace/propagation/OtTracePropagatorTest.java b/extensions/trace-propagators/src/test/java/io/opentelemetry/extension/trace/propagation/OtTracePropagatorTest.java index 39e4bbfc0d7..22001427a94 100644 --- a/extensions/trace-propagators/src/test/java/io/opentelemetry/extension/trace/propagation/OtTracePropagatorTest.java +++ b/extensions/trace-propagators/src/test/java/io/opentelemetry/extension/trace/propagation/OtTracePropagatorTest.java @@ -20,6 +20,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.Locale; import java.util.Map; import javax.annotation.Nullable; import org.junit.jupiter.api.Test; @@ -31,6 +32,8 @@ class OtTracePropagatorTest { private static final String SHORT_TRACE_ID = "ff00000000000000"; private static final String SHORT_TRACE_ID_FULL = "0000000000000000ff00000000000000"; private static final String SPAN_ID = "ff00000000000041"; + private static final String SHORT_SPAN_ID = "f00000000000041"; + private static final String SHORT_SPAN_ID_FULL = "0f00000000000041"; private static final TextMapSetter> setter = Map::put; private static final TextMapGetter> getter = new TextMapGetter>() { @@ -55,6 +58,18 @@ private static Context withSpanContext(SpanContext spanContext, Context context) return context.with(Span.wrap(spanContext)); } + private static String capitalizeFirstLetter(String input, String delimiter) { + String[] words = input.split(delimiter); + + for (int i = 0; i < words.length; i++) { + String firstLetter = words[i].substring(0, 1).toUpperCase(Locale.ROOT); + String restOfWord = words[i].substring(1).toLowerCase(Locale.ROOT); + words[i] = firstLetter + restOfWord; + } + + return String.join(delimiter, words); + } + @Test void inject_invalidContext() { Map carrier = new LinkedHashMap<>(); @@ -248,6 +263,45 @@ void extract_NotSampledContext_Short_TraceId() { SHORT_TRACE_ID_FULL, SPAN_ID, TraceFlags.getDefault(), TraceState.getDefault())); } + @Test + void extract_SampledContext_Int_Short_SPanId() { + Map carrier = new LinkedHashMap<>(); + carrier.put(OtTracePropagator.TRACE_ID_HEADER, TRACE_ID); + carrier.put(OtTracePropagator.SPAN_ID_HEADER, SHORT_SPAN_ID); + 
carrier.put(OtTracePropagator.SAMPLED_HEADER, Common.TRUE_INT); + + assertThat(getSpanContext(propagator.extract(Context.current(), carrier, getter))) + .isEqualTo( + SpanContext.createFromRemoteParent( + TRACE_ID, SHORT_SPAN_ID_FULL, TraceFlags.getSampled(), TraceState.getDefault())); + } + + @Test + void extract_SampledContext_Bool_Short_SpanId() { + Map carrier = new LinkedHashMap<>(); + carrier.put(OtTracePropagator.TRACE_ID_HEADER, TRACE_ID); + carrier.put(OtTracePropagator.SPAN_ID_HEADER, SHORT_SPAN_ID); + carrier.put(OtTracePropagator.SAMPLED_HEADER, "true"); + + assertThat(getSpanContext(propagator.extract(Context.current(), carrier, getter))) + .isEqualTo( + SpanContext.createFromRemoteParent( + TRACE_ID, SHORT_SPAN_ID_FULL, TraceFlags.getSampled(), TraceState.getDefault())); + } + + @Test + void extract_NotSampledContext_Short_SpanId() { + Map carrier = new LinkedHashMap<>(); + carrier.put(OtTracePropagator.TRACE_ID_HEADER, TRACE_ID); + carrier.put(OtTracePropagator.SPAN_ID_HEADER, SHORT_SPAN_ID); + carrier.put(OtTracePropagator.SAMPLED_HEADER, Common.FALSE_INT); + + assertThat(getSpanContext(propagator.extract(Context.current(), carrier, getter))) + .isEqualTo( + SpanContext.createFromRemoteParent( + TRACE_ID, SHORT_SPAN_ID_FULL, TraceFlags.getDefault(), TraceState.getDefault())); + } + @Test void extract_InvalidTraceId() { Map invalidHeaders = new LinkedHashMap<>(); @@ -313,6 +367,22 @@ void extract_Baggage() { assertThat(Baggage.fromContext(context)).isEqualTo(expectedBaggage); } + @Test + void extract_Baggage_CapitalizedHeaders() { + String capitalizedBaggageHeader = + capitalizeFirstLetter(OtTracePropagator.PREFIX_BAGGAGE_HEADER + "some-key", "-"); + Map carrier = new LinkedHashMap<>(); + carrier.put(OtTracePropagator.TRACE_ID_HEADER, TRACE_ID); + carrier.put(OtTracePropagator.SPAN_ID_HEADER, SPAN_ID); + carrier.put(OtTracePropagator.SAMPLED_HEADER, Common.TRUE_INT); + carrier.put(capitalizedBaggageHeader, "value"); + + Context context = propagator.extract(Context.current(), carrier, getter); + + Baggage expectedBaggage = Baggage.builder().put("some-key", "value").build(); + assertThat(Baggage.fromContext(context)).isEqualTo(expectedBaggage); + } + @Test void extract_Baggage_InvalidContext() { Map carrier = new LinkedHashMap<>(); diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7f93135c49b..a4b76b9530d 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 864d6c47512..36e4933e1da 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=591855b517fc635b9e04de1d05d5e76ada3f89f5fc76f87978d1b245b4f69225 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.3-bin.zip +distributionSha256Sum=20f1b1176237254a6fc204d8434196fa11a4cfb387567519c61556e8710aed78 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew index 0adc8e1a532..f5feea6d6b1 100755 --- a/gradlew +++ b/gradlew @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -55,7 +57,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -84,7 +86,8 @@ done # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s +' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -145,7 +148,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC3045 + # shellcheck disable=SC2039,SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac @@ -153,7 +156,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then '' | soft) :;; #( *) # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC3045 + # shellcheck disable=SC2039,SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -202,11 +205,11 @@ fi # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. set -- \ "-Dorg.gradle.appname=$APP_BASE_NAME" \ diff --git a/gradlew.bat b/gradlew.bat index 93e3f59f135..9d21a21834d 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,6 +13,8 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. @rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem @if "%DEBUG%"=="" @echo off @rem ########################################################################## @@ -43,11 +45,11 @@ set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 if %ERRORLEVEL% equ 0 goto execute -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 
1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail @@ -57,11 +59,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto execute -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail diff --git a/integration-tests/graal-incubating/build.gradle.kts b/integration-tests/graal-incubating/build.gradle.kts new file mode 100644 index 00000000000..7ad93ffb947 --- /dev/null +++ b/integration-tests/graal-incubating/build.gradle.kts @@ -0,0 +1,49 @@ +plugins { + id("otel.java-conventions") + id("org.graalvm.buildtools.native") +} + +description = "OpenTelemetry Graal Integration Tests (Incubating)" +otelJava.moduleName.set("io.opentelemetry.graal.integration.tests.incubating") + +sourceSets { + main { + // We need to ensure that we have the shadowed classes on the classpath, without this + // we will get the <:sdk:trace-shaded-deps> classes only, without the shadowed ones + val traceShadedDeps = project(":sdk:trace-shaded-deps") + output.dir(traceShadedDeps.file("build/extracted/shadow"), "builtBy" to ":sdk:trace-shaded-deps:extractShadowJar") + } +} + +dependencies { + implementation(project(":sdk:all")) + implementation(project(":sdk:trace-shaded-deps")) + implementation(project(":sdk:testing")) + implementation(project(":exporters:otlp:all")) + implementation(project(":api:incubator")) +} + +// org.graalvm.buildtools.native pluging requires java 11+ as of version 0.9.26 +// https://github.com/graalvm/native-build-tools/blob/master/docs/src/docs/asciidoc/index.adoc +tasks { + withType().configureEach { + sourceCompatibility = "11" + targetCompatibility = "11" + options.release.set(11) + } + withType().configureEach { + val testJavaVersion: String? 
by project + enabled = !testJavaVersion.equals("8") + } +} + +graalvmNative { + binaries { + named("test") { + // Required as of junit 5.10.0: https://junit.org/junit5/docs/5.10.0/release-notes/#deprecations-and-breaking-changes + buildArgs.add("--initialize-at-build-time=org.junit.platform.launcher.core.LauncherConfig") + buildArgs.add("--initialize-at-build-time=org.junit.jupiter.engine.config.InstantiatingConfigurationParameterConverter") + } + } + toolchainDetection.set(false) +} diff --git a/integration-tests/graal-incubating/src/test/java/io/opentelemetry/integrationtests/graal/IncubatingApiTests.java b/integration-tests/graal-incubating/src/test/java/io/opentelemetry/integrationtests/graal/IncubatingApiTests.java new file mode 100644 index 00000000000..fd3d0162dab --- /dev/null +++ b/integration-tests/graal-incubating/src/test/java/io/opentelemetry/integrationtests/graal/IncubatingApiTests.java @@ -0,0 +1,102 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.integrationtests.graal; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.incubator.logs.ExtendedLogger; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleGauge; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleHistogram; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleUpDownCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedLongCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedLongCounterBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedLongGauge; +import io.opentelemetry.api.incubator.metrics.ExtendedLongHistogram; +import io.opentelemetry.api.incubator.metrics.ExtendedLongUpDownCounter; +import io.opentelemetry.api.incubator.trace.ExtendedTracer; +import io.opentelemetry.api.logs.LoggerProvider; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.api.trace.TracerProvider; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import org.junit.jupiter.api.Test; + +class IncubatingApiTests { + @Test + void incubatingApiIsLoadedViaReflection() { + assertThat(LoggerProvider.noop().get("test")).isInstanceOf(ExtendedLogger.class); + assertThat(TracerProvider.noop().get("test")).isInstanceOf(ExtendedTracer.class); + assertThat(MeterProvider.noop().get("test").counterBuilder("test")) + .isInstanceOf(ExtendedLongCounterBuilder.class); + } + + @Test + void incubatingLogSdk() { + InMemoryLogRecordExporter exporter = InMemoryLogRecordExporter.create(); + SdkLoggerProvider loggerProvider = + SdkLoggerProvider.builder() + .addLogRecordProcessor(SimpleLogRecordProcessor.create(exporter)) + .build(); + + ExtendedLogger logger = (ExtendedLogger) loggerProvider.get("logger"); + logger.isEnabled(); + logger.logRecordBuilder().setBody("message").emit(); + } + + @Test + void incubatingTraceSdk() { + InMemorySpanExporter exporter = InMemorySpanExporter.create(); + SdkTracerProvider 
tracerProvider = + SdkTracerProvider.builder().addSpanProcessor(SimpleSpanProcessor.create(exporter)).build(); + + ExtendedTracer tracer = (ExtendedTracer) tracerProvider.get("tracer"); + tracer.isEnabled(); + tracer.spanBuilder("span").startAndRun(() -> {}); + } + + @Test + void incubatingMetricSdk() { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider meterProvider = + SdkMeterProvider.builder().registerMetricReader(reader).build(); + + Meter meter = meterProvider.get("meter"); + + ExtendedLongCounter longCounter = + (ExtendedLongCounter) meter.counterBuilder("longCounter").build(); + longCounter.isEnabled(); + ExtendedDoubleCounter doubleCounter = + (ExtendedDoubleCounter) meter.counterBuilder("doubleCounter").ofDoubles().build(); + doubleCounter.isEnabled(); + ExtendedLongUpDownCounter longUpDownCounter = + (ExtendedLongUpDownCounter) meter.upDownCounterBuilder("longUpDownCounter").build(); + longUpDownCounter.isEnabled(); + ExtendedDoubleUpDownCounter doubleUpDownCounter = + (ExtendedDoubleUpDownCounter) + meter.upDownCounterBuilder("doubleUpDownCounter").ofDoubles().build(); + doubleUpDownCounter.isEnabled(); + ExtendedDoubleHistogram doubleHistogram = + (ExtendedDoubleHistogram) meter.histogramBuilder("doubleHistogram").build(); + doubleHistogram.isEnabled(); + ExtendedLongHistogram longHistogram = + (ExtendedLongHistogram) meter.histogramBuilder("longHistogram").ofLongs().build(); + longHistogram.isEnabled(); + ExtendedDoubleGauge doubleGauge = + (ExtendedDoubleGauge) meter.gaugeBuilder("doubleGauge").build(); + doubleGauge.isEnabled(); + ExtendedLongGauge longGauge = + (ExtendedLongGauge) meter.gaugeBuilder("longGauge").ofLongs().build(); + longGauge.isEnabled(); + } +} diff --git a/integration-tests/graal/build.gradle.kts b/integration-tests/graal/build.gradle.kts index 41404ddfc8d..090be76a2f5 100644 --- a/integration-tests/graal/build.gradle.kts +++ b/integration-tests/graal/build.gradle.kts @@ -16,7 +16,23 @@ sourceSets { } dependencies { - implementation(project(path = ":sdk:trace-shaded-deps")) + implementation(project(":sdk:all")) + implementation(project(":sdk:trace-shaded-deps")) + implementation(project(":exporters:otlp:all")) +} + +// org.graalvm.buildtools.native pluging requires java 11+ as of version 0.9.26 +// https://github.com/graalvm/native-build-tools/blob/master/docs/src/docs/asciidoc/index.adoc +tasks { + withType().configureEach { + sourceCompatibility = "11" + targetCompatibility = "11" + options.release.set(11) + } + withType().configureEach { + val testJavaVersion: String? 
by project + enabled = !testJavaVersion.equals("8") + } } graalvmNative { diff --git a/integration-tests/graal/src/test/java/io/opentelemetry/integrationtests/graal/IncubatingNotFoundApiTests.java b/integration-tests/graal/src/test/java/io/opentelemetry/integrationtests/graal/IncubatingNotFoundApiTests.java new file mode 100644 index 00000000000..ba2ba02a2c3 --- /dev/null +++ b/integration-tests/graal/src/test/java/io/opentelemetry/integrationtests/graal/IncubatingNotFoundApiTests.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.integrationtests.graal; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.api.logs.LoggerProvider; +import io.opentelemetry.api.metrics.LongCounterBuilder; +import io.opentelemetry.api.metrics.MeterProvider; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.TracerProvider; +import org.junit.jupiter.api.Test; + +class IncubatingNotFoundApiTests { + @Test + void incubatingApiIsNotFoundViaReflection() { + assertThat(LoggerProvider.noop().get("test")).isInstanceOf(Logger.class); + assertThat(TracerProvider.noop().get("test")).isInstanceOf(Tracer.class); + assertThat(MeterProvider.noop().get("test").counterBuilder("test")) + .isInstanceOf(LongCounterBuilder.class); + } +} diff --git a/integration-tests/graal/src/test/java/io/opentelemetry/integrationtests/graal/InitializeSdkTest.java b/integration-tests/graal/src/test/java/io/opentelemetry/integrationtests/graal/InitializeSdkTest.java new file mode 100644 index 00000000000..8826db405de --- /dev/null +++ b/integration-tests/graal/src/test/java/io/opentelemetry/integrationtests/graal/InitializeSdkTest.java @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.integrationtests.graal; + +import static org.assertj.core.api.Assertions.assertThatCode; + +import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; +import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; +import org.junit.jupiter.api.Test; + +class InitializeSdkTest { + + @Test + void initializeSdk() { + assertThatCode( + () -> { + OpenTelemetrySdk sdk = + OpenTelemetrySdk.builder() + .setTracerProvider( + SdkTracerProvider.builder() + .addSpanProcessor( + BatchSpanProcessor.builder(OtlpGrpcSpanExporter.getDefault()) + .build()) + .build()) + .setMeterProvider( + SdkMeterProvider.builder() + .registerMetricReader( + PeriodicMetricReader.create(OtlpGrpcMetricExporter.getDefault())) + .build()) + .setLoggerProvider( + SdkLoggerProvider.builder() + .addLogRecordProcessor( + BatchLogRecordProcessor.builder( + OtlpGrpcLogRecordExporter.getDefault()) + .build()) + .build()) + .build(); + sdk.close(); + }) + .doesNotThrowAnyException(); + } +} diff --git a/integration-tests/otlp/build.gradle.kts b/integration-tests/otlp/build.gradle.kts index 6b01f55ac9e..5f67edda0e1 100644 --- 
a/integration-tests/otlp/build.gradle.kts +++ b/integration-tests/otlp/build.gradle.kts @@ -9,7 +9,7 @@ dependencies { api("org.testcontainers:junit-jupiter") implementation(project(":exporters:otlp:all")) - implementation(project(":api:events")) + implementation(project(":api:incubator")) compileOnly("com.google.errorprone:error_prone_annotations") diff --git a/integration-tests/otlp/src/main/java/io/opentelemetry/integrationtest/OtlpExporterIntegrationTest.java b/integration-tests/otlp/src/main/java/io/opentelemetry/integrationtest/OtlpExporterIntegrationTest.java index 379aeb5112c..83121892006 100644 --- a/integration-tests/otlp/src/main/java/io/opentelemetry/integrationtest/OtlpExporterIntegrationTest.java +++ b/integration-tests/otlp/src/main/java/io/opentelemetry/integrationtest/OtlpExporterIntegrationTest.java @@ -5,11 +5,13 @@ package io.opentelemetry.integrationtest; +import static io.opentelemetry.api.common.Value.of; import static java.util.concurrent.CompletableFuture.completedFuture; import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; import static org.testcontainers.Testcontainers.exposeHostPorts; +import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; import com.linecorp.armeria.server.ServerBuilder; import com.linecorp.armeria.server.ServiceRequestContext; @@ -19,7 +21,8 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.events.EventEmitter; +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.api.incubator.logs.ExtendedLogRecordBuilder; import io.opentelemetry.api.logs.Logger; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.metrics.LongCounter; @@ -45,7 +48,8 @@ import io.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest; import io.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse; import io.opentelemetry.proto.common.v1.AnyValue; -import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.common.v1.ArrayValue; +import io.opentelemetry.proto.common.v1.KeyValueList; import io.opentelemetry.proto.logs.v1.ResourceLogs; import io.opentelemetry.proto.logs.v1.ScopeLogs; import io.opentelemetry.proto.metrics.v1.AggregationTemporality; @@ -57,10 +61,10 @@ import io.opentelemetry.proto.trace.v1.ResourceSpans; import io.opentelemetry.proto.trace.v1.ScopeSpans; import io.opentelemetry.proto.trace.v1.Span.Link; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; import io.opentelemetry.sdk.logs.export.LogRecordExporter; -import io.opentelemetry.sdk.logs.internal.SdkEventEmitterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; import io.opentelemetry.sdk.metrics.export.MetricExporter; @@ -73,6 +77,7 @@ import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; import java.io.UncheckedIOException; +import java.nio.charset.StandardCharsets; import java.time.Duration; import java.util.ArrayList; import java.util.Collections; @@ -86,6 +91,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; +import 
org.junit.jupiter.params.provider.EnumSource; import org.junit.jupiter.params.provider.ValueSource; import org.slf4j.LoggerFactory; import org.testcontainers.containers.BindMode; @@ -124,7 +130,10 @@ abstract class OtlpExporterIntegrationTest { @RegisterExtension static final SelfSignedCertificateExtension clientTls = new SelfSignedCertificateExtension(); + @SuppressWarnings("NonFinalStaticField") private static OtlpGrpcServer grpcServer; + + @SuppressWarnings("NonFinalStaticField") private static GenericContainer collector; @BeforeAll @@ -185,7 +194,7 @@ void afterEach() {} @ParameterizedTest @ValueSource(strings = {"gzip", "none"}) void testOtlpGrpcTraceExport(String compression) { - SpanExporter otlpGrpcTraceExporter = + SpanExporter exporter = OtlpGrpcSpanExporter.builder() .setEndpoint( "http://" @@ -195,7 +204,23 @@ void testOtlpGrpcTraceExport(String compression) { .setCompression(compression) .build(); - testTraceExport(otlpGrpcTraceExporter); + testTraceExport(exporter); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testOtlpGrpcTraceExport_memoryMode(MemoryMode memoryMode) { + SpanExporter exporter = + OtlpGrpcSpanExporter.builder() + .setEndpoint( + "http://" + + collector.getHost() + + ":" + + collector.getMappedPort(COLLECTOR_OTLP_GRPC_PORT)) + .setMemoryMode(memoryMode) + .build(); + + testTraceExport(exporter); } @Test @@ -217,7 +242,7 @@ void testOtlpGrpcTraceExport_mtls() throws Exception { @ParameterizedTest @ValueSource(strings = {"gzip", "none"}) void testOtlpHttpTraceExport(String compression) { - SpanExporter otlpGrpcTraceExporter = + SpanExporter exporter = OtlpHttpSpanExporter.builder() .setEndpoint( "http://" @@ -228,7 +253,24 @@ void testOtlpHttpTraceExport(String compression) { .setCompression(compression) .build(); - testTraceExport(otlpGrpcTraceExporter); + testTraceExport(exporter); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testOtlpHttpTraceExport_memoryMode(MemoryMode memoryMode) { + SpanExporter exporter = + OtlpHttpSpanExporter.builder() + .setEndpoint( + "http://" + + collector.getHost() + + ":" + + collector.getMappedPort(COLLECTOR_OTLP_HTTP_PORT) + + "/v1/traces") + .setMemoryMode(memoryMode) + .build(); + + testTraceExport(exporter); } @Test @@ -284,7 +326,7 @@ private static void testTraceExport(SpanExporter spanExporter) { ResourceSpans resourceSpans = request.getResourceSpans(0); assertThat(resourceSpans.getResource().getAttributesList()) .contains( - KeyValue.newBuilder() + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() .setKey(SERVICE_NAME.getKey()) .setValue(AnyValue.newBuilder().setStringValue("integration test").build()) .build()); @@ -303,7 +345,7 @@ private static void testTraceExport(SpanExporter spanExporter) { assertThat(protoSpan.getAttributesList()) .isEqualTo( Collections.singletonList( - KeyValue.newBuilder() + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() .setKey("key") .setValue(AnyValue.newBuilder().setStringValue("value").build()) .build())); @@ -318,7 +360,7 @@ private static void testTraceExport(SpanExporter spanExporter) { @ParameterizedTest @ValueSource(strings = {"gzip", "none"}) void testOtlpGrpcMetricExport(String compression) { - MetricExporter otlpGrpcMetricExporter = + MetricExporter exporter = OtlpGrpcMetricExporter.builder() .setEndpoint( "http://" @@ -328,7 +370,24 @@ void testOtlpGrpcMetricExport(String compression) { .setCompression(compression) .build(); - testMetricExport(otlpGrpcMetricExporter); + testMetricExport(exporter); + } + + @ParameterizedTest + 
@EnumSource(MemoryMode.class) + void testOtlpGrpcMetricExport_memoryMode(MemoryMode memoryMode) { + MetricExporter exporter = + OtlpGrpcMetricExporter.builder() + .setEndpoint( + "http://" + + collector.getHost() + + ":" + + collector.getMappedPort(COLLECTOR_OTLP_GRPC_PORT)) + .setMemoryMode(memoryMode) + .build(); + assertThat(exporter.getMemoryMode()).isEqualTo(memoryMode); + + testMetricExport(exporter); } @Test @@ -350,7 +409,8 @@ void testOtlpGrpcMetricExport_mtls() throws Exception { @ParameterizedTest @ValueSource(strings = {"gzip", "none"}) void testOtlpHttpMetricExport(String compression) { - MetricExporter otlpGrpcMetricExporter = + + MetricExporter exporter = OtlpHttpMetricExporter.builder() .setEndpoint( "http://" @@ -361,7 +421,25 @@ void testOtlpHttpMetricExport(String compression) { .setCompression(compression) .build(); - testMetricExport(otlpGrpcMetricExporter); + testMetricExport(exporter); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testOtlpHttpMetricExport_memoryMode(MemoryMode memoryMode) { + MetricExporter exporter = + OtlpHttpMetricExporter.builder() + .setEndpoint( + "http://" + + collector.getHost() + + ":" + + collector.getMappedPort(COLLECTOR_OTLP_HTTP_PORT) + + "/v1/metrics") + .setMemoryMode(memoryMode) + .build(); + assertThat(exporter.getMemoryMode()).isEqualTo(memoryMode); + + testMetricExport(exporter); } @Test @@ -413,7 +491,7 @@ private static void testMetricExport(MetricExporter metricExporter) { ResourceMetrics resourceMetrics = request.getResourceMetrics(0); assertThat(resourceMetrics.getResource().getAttributesList()) .contains( - KeyValue.newBuilder() + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() .setKey(SERVICE_NAME.getKey()) .setValue(AnyValue.newBuilder().setStringValue("integration test").build()) .build()); @@ -438,7 +516,7 @@ private static void testMetricExport(MetricExporter metricExporter) { assertThat(dataPoint.getAttributesList()) .isEqualTo( Collections.singletonList( - KeyValue.newBuilder() + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() .setKey("key") .setValue(AnyValue.newBuilder().setStringValue("value").build()) .build())); @@ -461,6 +539,22 @@ void testOtlpGrpcLogExport(String compression) { testLogRecordExporter(otlpGrpcLogRecordExporter); } + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testOtlpGrpcLogExport_memoryMode(MemoryMode memoryMode) { + LogRecordExporter otlpGrpcLogRecordExporter = + OtlpGrpcLogRecordExporter.builder() + .setEndpoint( + "http://" + + collector.getHost() + + ":" + + collector.getMappedPort(COLLECTOR_OTLP_GRPC_PORT)) + .setMemoryMode(memoryMode) + .build(); + + testLogRecordExporter(otlpGrpcLogRecordExporter); + } + @Test void testOtlpGrpcLogExport_mtls() throws Exception { LogRecordExporter exporter = @@ -494,6 +588,23 @@ void testOtlpHttpLogExport(String compression) { testLogRecordExporter(otlpHttpLogRecordExporter); } + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testOtlpHttpLogExport_memoryMode(MemoryMode memoryMode) { + LogRecordExporter otlpHttpLogRecordExporter = + OtlpHttpLogRecordExporter.builder() + .setEndpoint( + "http://" + + collector.getHost() + + ":" + + collector.getMappedPort(COLLECTOR_OTLP_HTTP_PORT) + + "/v1/logs") + .setMemoryMode(memoryMode) + .build(); + + testLogRecordExporter(otlpHttpLogRecordExporter); + } + @Test void testOtlpHttpLogExport_mtls() throws Exception { LogRecordExporter exporter = @@ -511,6 +622,7 @@ void testOtlpHttpLogExport_mtls() throws Exception { testLogRecordExporter(exporter); } + 
@SuppressWarnings("BadImport") private static void testLogRecordExporter(LogRecordExporter logRecordExporter) { SdkLoggerProvider loggerProvider = SdkLoggerProvider.builder() @@ -522,11 +634,6 @@ private static void testLogRecordExporter(LogRecordExporter logRecordExporter) { .build(); Logger logger = loggerProvider.get(OtlpExporterIntegrationTest.class.getName()); - EventEmitter eventEmitter = - SdkEventEmitterProvider.create(loggerProvider) - .eventEmitterBuilder(OtlpExporterIntegrationTest.class.getName()) - .setEventDomain("event-domain") - .build(); SpanContext spanContext = SpanContext.create( @@ -535,17 +642,30 @@ private static void testLogRecordExporter(LogRecordExporter logRecordExporter) { TraceFlags.getDefault(), TraceState.getDefault()); - try (Scope unused = Span.wrap(spanContext).makeCurrent()) { - logger - .logRecordBuilder() + try (Scope ignored = Span.wrap(spanContext).makeCurrent()) { + ((ExtendedLogRecordBuilder) logger.logRecordBuilder()) + .setEventName("event name") + .setBody( + of( + KeyValue.of("str_key", of("value")), + KeyValue.of("bool_key", of(true)), + KeyValue.of("int_key", of(1L)), + KeyValue.of("double_key", of(1.1)), + KeyValue.of("bytes_key", of("value".getBytes(StandardCharsets.UTF_8))), + KeyValue.of("arr_key", of(of("value"), of(1L))), + KeyValue.of( + "kv_list", + of( + KeyValue.of("child_str_key", of("value")), + KeyValue.of( + "child_kv_list", + of(KeyValue.of("grandchild_str_key", of("value")))))))) .setTimestamp(100, TimeUnit.NANOSECONDS) - .setBody("log body") .setAllAttributes(Attributes.builder().put("key", "value").build()) .setSeverity(Severity.DEBUG) .setSeverityText("DEBUG") .setContext(Context.current()) .emit(); - eventEmitter.emit("event-name", Attributes.builder().put("key", "value").build()); } // Closing triggers flush of processor @@ -561,7 +681,7 @@ private static void testLogRecordExporter(LogRecordExporter logRecordExporter) { ResourceLogs resourceLogs = request.getResourceLogs(0); assertThat(resourceLogs.getResource().getAttributesList()) .contains( - KeyValue.newBuilder() + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() .setKey(SERVICE_NAME.getKey()) .setValue(AnyValue.newBuilder().setStringValue("integration test").build()) .build()); @@ -569,15 +689,111 @@ private static void testLogRecordExporter(LogRecordExporter logRecordExporter) { ScopeLogs ilLogs = resourceLogs.getScopeLogs(0); assertThat(ilLogs.getScope().getName()).isEqualTo(OtlpExporterIntegrationTest.class.getName()); - assertThat(ilLogs.getLogRecordsCount()).isEqualTo(2); + assertThat(ilLogs.getLogRecordsCount()).isEqualTo(1); // LogRecord via Logger.logRecordBuilder()...emit() io.opentelemetry.proto.logs.v1.LogRecord protoLog1 = ilLogs.getLogRecords(0); - assertThat(protoLog1.getBody().getStringValue()).isEqualTo("log body"); + assertThat(protoLog1.getEventName()).isEqualTo("event name"); + assertThat(protoLog1.getBody()) + .isEqualTo( + AnyValue.newBuilder() + .setKvlistValue( + KeyValueList.newBuilder() + .addValues( + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("str_key") + .setValue(AnyValue.newBuilder().setStringValue("value").build()) + .build()) + .addValues( + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("bool_key") + .setValue(AnyValue.newBuilder().setBoolValue(true).build()) + .build()) + .addValues( + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("int_key") + .setValue(AnyValue.newBuilder().setIntValue(1).build()) + .build()) + .addValues( + 
io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("double_key") + .setValue(AnyValue.newBuilder().setDoubleValue(1.1).build()) + .build()) + .addValues( + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("bytes_key") + .setValue( + AnyValue.newBuilder() + .setBytesValue( + ByteString.copyFrom( + "value".getBytes(StandardCharsets.UTF_8))) + .build()) + .build()) + .addValues( + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("arr_key") + .setValue( + AnyValue.newBuilder() + .setArrayValue( + ArrayValue.newBuilder() + .addValues( + AnyValue.newBuilder() + .setStringValue("value") + .build()) + .addValues( + AnyValue.newBuilder().setIntValue(1).build()) + .build()) + .build()) + .build()) + .addValues( + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() + .setKey("kv_list") + .setValue( + AnyValue.newBuilder() + .setKvlistValue( + KeyValueList.newBuilder() + .addValues( + io.opentelemetry.proto.common.v1.KeyValue + .newBuilder() + .setKey("child_str_key") + .setValue( + AnyValue.newBuilder() + .setStringValue("value") + .build()) + .build()) + .addValues( + io.opentelemetry.proto.common.v1.KeyValue + .newBuilder() + .setKey("child_kv_list") + .setValue( + AnyValue.newBuilder() + .setKvlistValue( + KeyValueList.newBuilder() + .addValues( + io.opentelemetry.proto + .common.v1.KeyValue + .newBuilder() + .setKey( + "grandchild_str_key") + .setValue( + AnyValue + .newBuilder() + .setStringValue( + "value") + .build()) + .build()) + .build()) + .build()) + .build()) + .build()) + .build()) + .build()) + .build()) + .build()); assertThat(protoLog1.getAttributesList()) .isEqualTo( Collections.singletonList( - KeyValue.newBuilder() + io.opentelemetry.proto.common.v1.KeyValue.newBuilder() .setKey("key") .setValue(AnyValue.newBuilder().setStringValue("value").build()) .build())); @@ -591,32 +807,6 @@ private static void testLogRecordExporter(LogRecordExporter logRecordExporter) { assertThat(TraceFlags.fromByte((byte) protoLog1.getFlags())) .isEqualTo(spanContext.getTraceFlags()); assertThat(protoLog1.getTimeUnixNano()).isEqualTo(100); - - // LogRecord via EventEmitter.emit(String, Attributes) - io.opentelemetry.proto.logs.v1.LogRecord protoLog2 = ilLogs.getLogRecords(1); - assertThat(protoLog2.getBody().getStringValue()).isEmpty(); - assertThat(protoLog2.getAttributesList()) - .containsExactlyInAnyOrder( - KeyValue.newBuilder() - .setKey("event.domain") - .setValue(AnyValue.newBuilder().setStringValue("event-domain").build()) - .build(), - KeyValue.newBuilder() - .setKey("event.name") - .setValue(AnyValue.newBuilder().setStringValue("event-name").build()) - .build(), - KeyValue.newBuilder() - .setKey("key") - .setValue(AnyValue.newBuilder().setStringValue("value").build()) - .build()); - assertThat(protoLog2.getSeverityText()).isEmpty(); - assertThat(TraceId.fromBytes(protoLog2.getTraceId().toByteArray())) - .isEqualTo(spanContext.getTraceId()); - assertThat(SpanId.fromBytes(protoLog2.getSpanId().toByteArray())) - .isEqualTo(spanContext.getSpanId()); - assertThat(TraceFlags.fromByte((byte) protoLog2.getFlags())) - .isEqualTo(spanContext.getTraceFlags()); - assertThat(protoLog2.getTimeUnixNano()).isGreaterThan(0); } private static class OtlpGrpcServer extends ServerExtension { diff --git a/integration-tests/otlp/src/main/resources/otel-config.yaml b/integration-tests/otlp/src/main/resources/otel-config.yaml index a58cf409526..614cb5443bd 100644 --- a/integration-tests/otlp/src/main/resources/otel-config.yaml +++ 
b/integration-tests/otlp/src/main/resources/otel-config.yaml @@ -1,5 +1,6 @@ extensions: - health_check: {} + health_check: + endpoint: 0.0.0.0:13133 receivers: otlp: protocols: @@ -12,20 +13,20 @@ receivers: grpc: endpoint: 0.0.0.0:5317 tls: - client_ca_file: $MTLS_CLIENT_CERTIFICATE - cert_file: $MTLS_SERVER_CERTIFICATE - key_file: $MTLS_SERVER_KEY + client_ca_file: ${MTLS_CLIENT_CERTIFICATE} + cert_file: ${MTLS_SERVER_CERTIFICATE} + key_file: ${MTLS_SERVER_KEY} http: endpoint: 0.0.0.0:5318 tls: - client_ca_file: $MTLS_CLIENT_CERTIFICATE - cert_file: $MTLS_SERVER_CERTIFICATE - key_file: $MTLS_SERVER_KEY + client_ca_file: ${MTLS_CLIENT_CERTIFICATE} + cert_file: ${MTLS_SERVER_CERTIFICATE} + key_file: ${MTLS_SERVER_KEY} exporters: - logging: - verbosity: $LOGGING_EXPORTER_VERBOSITY_LEVEL + debug: + verbosity: ${LOGGING_EXPORTER_VERBOSITY_LEVEL} otlp: - endpoint: $OTLP_EXPORTER_ENDPOINT + endpoint: ${OTLP_EXPORTER_ENDPOINT} tls: insecure: true compression: none @@ -34,10 +35,10 @@ service: pipelines: metrics: receivers: [otlp, otlp/mtls] - exporters: [logging, otlp] + exporters: [debug, otlp] traces: receivers: [otlp, otlp/mtls] - exporters: [logging, otlp] + exporters: [debug, otlp] logs: receivers: [otlp, otlp/mtls] - exporters: [logging, otlp] + exporters: [debug, otlp] diff --git a/integration-tests/src/test/java/io/opentelemetry/B3PropagationIntegrationTest.java b/integration-tests/src/test/java/io/opentelemetry/B3PropagationIntegrationTest.java index b87db824a26..66eaa8e6cef 100644 --- a/integration-tests/src/test/java/io/opentelemetry/B3PropagationIntegrationTest.java +++ b/integration-tests/src/test/java/io/opentelemetry/B3PropagationIntegrationTest.java @@ -50,7 +50,10 @@ class B3PropagationIntegrationTest { private static final InMemorySpanExporter spanExporter = InMemorySpanExporter.create(); + @SuppressWarnings("NonFinalStaticField") static WebClient b3MultiClient; + + @SuppressWarnings("NonFinalStaticField") static WebClient b3SingleClient; private static class FrontendService implements HttpService { diff --git a/integration-tests/tracecontext/build.gradle.kts b/integration-tests/tracecontext/build.gradle.kts index 44cea7d471b..17b01ecddb7 100644 --- a/integration-tests/tracecontext/build.gradle.kts +++ b/integration-tests/tracecontext/build.gradle.kts @@ -1,7 +1,7 @@ plugins { id("otel.java-conventions") - id("com.github.johnrengelman.shadow") + id("com.gradleup.shadow") } description = "OpenTelemetry W3C Context Propagation Integration Tests" diff --git a/integration-tests/tracecontext/docker/Dockerfile b/integration-tests/tracecontext/docker/Dockerfile index fc6dd4e7cea..d9de90d1102 100644 --- a/integration-tests/tracecontext/docker/Dockerfile +++ b/integration-tests/tracecontext/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3 AS build +FROM python:3.13.2@sha256:385ccb8304f6330738a6d9e6fa0bd7608e006da7e15bc52b33b0398e1ba4a15b AS build # Main branch SHA as of April-1-2021 ARG TRACECONTEXT_GIT_TAG="dcd3ad9b7d6ac36f70ff3739874b73c11b0302a1" @@ -11,7 +11,7 @@ RUN unzip trace-context.zip RUN rm trace-context.zip RUN mv trace-context-${TRACECONTEXT_GIT_TAG}/test /tracecontext-testsuite -FROM python:3-slim +FROM python:3.13.2-slim@sha256:f3614d98f38b0525d670f287b0474385952e28eb43016655dd003d0e28cf8652 RUN pip install aiohttp diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/MetricAdapter.java similarity index 97% rename from 
opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java rename to opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/MetricAdapter.java index 7ea80936d74..366f6786a4d 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapter.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/MetricAdapter.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.opencensusshim.internal.metrics; +package io.opentelemetry.opencensusshim; import io.opencensus.common.Timestamp; import io.opencensus.metrics.LabelKey; @@ -55,13 +55,8 @@ import java.util.regex.Pattern; import javax.annotation.Nullable; -/** - * Adapts an OpenCensus metric into the OpenTelemetry metric data API. - * - *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. - */ -public final class MetricAdapter { +/** Adapts an OpenCensus metric into the OpenTelemetry metric data API. */ +final class MetricAdapter { private MetricAdapter() {} // All OpenCensus metrics come from this shim. @@ -81,7 +76,7 @@ private MetricAdapter() {} * @param otelResource The resource associated with the opentelemetry SDK. * @param censusMetric The OpenCensus metric to convert. */ - public static MetricData convert(Resource otelResource, Metric censusMetric) { + static MetricData convert(Resource otelResource, Metric censusMetric) { // Note: we can't just adapt interfaces, we need to do full copy because OTel data API uses // auto-value vs. pure interfaces. switch (censusMetric.getMetricDescriptor().getType()) { diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenCensusMetricProducer.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenCensusMetricProducer.java new file mode 100644 index 00000000000..121c61360f1 --- /dev/null +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenCensusMetricProducer.java @@ -0,0 +1,52 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.opencensusshim; + +import io.opencensus.metrics.Metrics; +import io.opencensus.metrics.export.MetricProducerManager; +import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.MetricProducer; +import io.opentelemetry.sdk.metrics.export.MetricReader; +import io.opentelemetry.sdk.resources.Resource; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * {@link MetricProducer} for OpenCensus metrics, which allows {@link MetricReader}s to read from + * both OpenTelemetry and OpenCensus metrics. + * + *
<p>
To use, register with {@link SdkMeterProviderBuilder#registerMetricProducer(MetricProducer)}. + */ +public final class OpenCensusMetricProducer implements MetricProducer { + private final MetricProducerManager openCensusMetricStorage; + + private OpenCensusMetricProducer(MetricProducerManager openCensusMetricStorage) { + this.openCensusMetricStorage = openCensusMetricStorage; + } + + /** + * Constructs a new {@link OpenCensusMetricProducer} that reports against the given {@link + * Resource}. + */ + public static MetricProducer create() { + return new OpenCensusMetricProducer(Metrics.getExportComponent().getMetricProducerManager()); + } + + @Override + public Collection produce(Resource resource) { + List result = new ArrayList<>(); + openCensusMetricStorage + .getAllMetricProducer() + .forEach( + producer -> + producer + .getMetrics() + .forEach(metric -> result.add(MetricAdapter.convert(resource, metric)))); + return result; + } +} diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryContextManager.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryContextManager.java index d73e41ab5dd..24d9818645c 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryContextManager.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryContextManager.java @@ -22,6 +22,7 @@ public final class OpenTelemetryContextManager implements ContextManager { private static final Logger LOGGER = Logger.getLogger(OpenTelemetryContextManager.class.getName()); + @SuppressWarnings("unused") // Loaded via reflection public OpenTelemetryContextManager() {} @Override diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryCtx.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryCtx.java index ae94f630713..a551597186f 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryCtx.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryCtx.java @@ -10,7 +10,7 @@ import io.opentelemetry.context.Scope; import javax.annotation.Nullable; -class OpenTelemetryCtx implements ContextHandle { +final class OpenTelemetryCtx implements ContextHandle { private final Context context; diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java deleted file mode 100644 index 32783892204..00000000000 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricsExporter.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.opencensusshim; - -import com.google.common.base.Joiner; -import io.opencensus.common.Duration; -import io.opencensus.exporter.metrics.util.IntervalMetricReader; -import io.opencensus.exporter.metrics.util.MetricExporter; -import io.opencensus.exporter.metrics.util.MetricReader; -import io.opencensus.metrics.Metrics; -import io.opencensus.metrics.export.Metric; -import io.opencensus.metrics.export.MetricDescriptor; -import io.opentelemetry.opencensusshim.internal.metrics.MetricAdapter; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.resources.Resource; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import 
java.util.Set; -import java.util.logging.Logger; - -@Deprecated -public final class OpenTelemetryMetricsExporter extends MetricExporter { - private static final Logger LOGGER = - Logger.getLogger(OpenTelemetryMetricsExporter.class.getName()); - - private static final String EXPORTER_NAME = "OpenTelemetryMetricExporter"; - - private final IntervalMetricReader intervalMetricReader; - private final io.opentelemetry.sdk.metrics.export.MetricExporter otelExporter; - // TODO - find this from OTel SDK. - private final Resource resource = Resource.getDefault(); - - public static OpenTelemetryMetricsExporter createAndRegister( - io.opentelemetry.sdk.metrics.export.MetricExporter otelExporter) { - return new OpenTelemetryMetricsExporter(otelExporter, Duration.create(60, 0)); - } - - public static OpenTelemetryMetricsExporter createAndRegister( - io.opentelemetry.sdk.metrics.export.MetricExporter otelExporter, Duration exportInterval) { - return new OpenTelemetryMetricsExporter(otelExporter, exportInterval); - } - - private OpenTelemetryMetricsExporter( - io.opentelemetry.sdk.metrics.export.MetricExporter otelExporter, Duration exportInterval) { - this.otelExporter = otelExporter; - IntervalMetricReader.Options.Builder options = IntervalMetricReader.Options.builder(); - MetricReader reader = - MetricReader.create( - MetricReader.Options.builder() - .setMetricProducerManager(Metrics.getExportComponent().getMetricProducerManager()) - .setSpanName(EXPORTER_NAME) - .build()); - intervalMetricReader = - IntervalMetricReader.create( - this, reader, options.setExportInterval(exportInterval).build()); - } - - @Override - public void export(Collection metrics) { - List metricData = new ArrayList<>(); - Set unsupportedTypes = new HashSet<>(); - for (Metric metric : metrics) { - metricData.add(MetricAdapter.convert(resource, metric)); - } - if (!unsupportedTypes.isEmpty()) { - LOGGER.warning( - Joiner.on(",").join(unsupportedTypes) - + " not supported by OpenCensus to OpenTelemetry migrator."); - } - if (!metricData.isEmpty()) { - otelExporter.export(metricData); - } - } - - public void stop() { - intervalMetricReader.stop(); - } -} diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryNoRecordEventsSpanImpl.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryNoRecordEventsSpanImpl.java index b6f3e8eec11..1e18beb8ce4 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryNoRecordEventsSpanImpl.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryNoRecordEventsSpanImpl.java @@ -39,7 +39,8 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Nonnull; -class OpenTelemetryNoRecordEventsSpanImpl extends Span implements io.opentelemetry.api.trace.Span { +final class OpenTelemetryNoRecordEventsSpanImpl extends Span + implements io.opentelemetry.api.trace.Span { private static final EnumSet NOT_RECORD_EVENTS_SPAN_OPTIONS = EnumSet.noneOf(Options.class); diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryPropagationComponentImpl.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryPropagationComponentImpl.java index 6898345ebd0..a4d77541675 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryPropagationComponentImpl.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryPropagationComponentImpl.java @@ -10,7 +10,7 @@ import 
io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.extension.trace.propagation.B3Propagator; -class OpenTelemetryPropagationComponentImpl extends PropagationComponentImpl { +final class OpenTelemetryPropagationComponentImpl extends PropagationComponentImpl { private final TextFormat b3Format = new OpenTelemetryTextFormatImpl(B3Propagator.injectingMultiHeaders()); diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetrySpanBuilderImpl.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetrySpanBuilderImpl.java index bd09e9b66f1..023180bb870 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetrySpanBuilderImpl.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetrySpanBuilderImpl.java @@ -48,7 +48,7 @@ import java.util.Random; import javax.annotation.Nullable; -class OpenTelemetrySpanBuilderImpl extends SpanBuilder { +final class OpenTelemetrySpanBuilderImpl extends SpanBuilder { private static final Tracer OTEL_TRACER = GlobalOpenTelemetry.getTracer("io.opentelemetry.opencensusshim", OtelVersion.VERSION); @@ -68,6 +68,29 @@ class OpenTelemetrySpanBuilderImpl extends SpanBuilder { @Nullable private Sampler ocSampler; @Nullable private SpanKind otelKind; + private OpenTelemetrySpanBuilderImpl( + String name, + @Nullable SpanContext ocRemoteParentSpanContext, + @Nullable Span ocParent, + OpenTelemetrySpanBuilderImpl.Options options) { + this.name = checkNotNull(name, "name"); + this.ocParent = ocParent; + this.ocRemoteParentSpanContext = ocRemoteParentSpanContext; + this.options = options; + } + + static OpenTelemetrySpanBuilderImpl createWithParent( + String spanName, @Nullable Span parent, OpenTelemetrySpanBuilderImpl.Options options) { + return new OpenTelemetrySpanBuilderImpl(spanName, null, parent, options); + } + + static OpenTelemetrySpanBuilderImpl createWithRemoteParent( + String spanName, + @Nullable SpanContext remoteParentSpanContext, + OpenTelemetrySpanBuilderImpl.Options options) { + return new OpenTelemetrySpanBuilderImpl(spanName, remoteParentSpanContext, null, options); + } + @Override public SpanBuilder setSampler(Sampler sampler) { this.ocSampler = checkNotNull(sampler, "sampler"); @@ -141,7 +164,11 @@ public Span startSpan() { otelSpanBuilder.setParent(Context.current().with((OpenTelemetrySpanImpl) ocParent)); } if (ocRemoteParentSpanContext != null) { - otelSpanBuilder.addLink(SpanConverter.mapSpanContext(ocRemoteParentSpanContext)); + io.opentelemetry.api.trace.SpanContext spanContext = + SpanConverter.mapSpanContext(ocRemoteParentSpanContext, /* isRemoteParent= */ true); + otelSpanBuilder.setParent( + Context.current().with(io.opentelemetry.api.trace.Span.wrap(spanContext))); + otelSpanBuilder.addLink(spanContext); } if (otelKind != null) { otelSpanBuilder.setSpanKind(otelKind); @@ -155,29 +182,6 @@ public Span startSpan() { return new OpenTelemetrySpanImpl(otSpan); } - private OpenTelemetrySpanBuilderImpl( - String name, - @Nullable SpanContext ocRemoteParentSpanContext, - @Nullable Span ocParent, - OpenTelemetrySpanBuilderImpl.Options options) { - this.name = checkNotNull(name, "name"); - this.ocParent = ocParent; - this.ocRemoteParentSpanContext = ocRemoteParentSpanContext; - this.options = options; - } - - static OpenTelemetrySpanBuilderImpl createWithParent( - String spanName, @Nullable Span parent, OpenTelemetrySpanBuilderImpl.Options options) { - return new OpenTelemetrySpanBuilderImpl(spanName, null, 
parent, options); - } - - static OpenTelemetrySpanBuilderImpl createWithRemoteParent( - String spanName, - @Nullable SpanContext remoteParentSpanContext, - OpenTelemetrySpanBuilderImpl.Options options) { - return new OpenTelemetrySpanBuilderImpl(spanName, remoteParentSpanContext, null, options); - } - private static boolean makeSamplingDecision( @Nullable SpanContext parent, @Nullable Boolean hasRemoteParent, diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTextFormatImpl.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTextFormatImpl.java index c8c2c3d4009..ac4d76f429d 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTextFormatImpl.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTextFormatImpl.java @@ -18,7 +18,7 @@ import java.util.List; import javax.annotation.Nullable; -class OpenTelemetryTextFormatImpl extends TextFormat { +final class OpenTelemetryTextFormatImpl extends TextFormat { private final TextMapPropagator propagator; diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTraceComponentImpl.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTraceComponentImpl.java index 4859dd9ca1f..503220353b4 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTraceComponentImpl.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTraceComponentImpl.java @@ -30,7 +30,7 @@ public final class OpenTelemetryTraceComponentImpl extends TraceComponent { private final TraceConfig traceConfig = makeTraceConfig(); private final Tracer tracer; - /** Public constructor to be used with reflection loading. 
*/ + @SuppressWarnings("unused") // Loaded via reflection public OpenTelemetryTraceComponentImpl() { clock = MillisClock.getInstance(); RandomHandler randomHandler = new ThreadLocalRandomHandler(); @@ -48,7 +48,7 @@ public PropagationComponent getPropagationComponent() { } @Override - public final Clock getClock() { + public Clock getClock() { return clock; } diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTracerImpl.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTracerImpl.java index c3ba98b67fd..4bffa46fc26 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTracerImpl.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/OpenTelemetryTracerImpl.java @@ -30,10 +30,10 @@ import io.opencensus.trace.config.TraceConfig; import javax.annotation.Nullable; -class OpenTelemetryTracerImpl extends Tracer { +final class OpenTelemetryTracerImpl extends Tracer { private final OpenTelemetrySpanBuilderImpl.Options spanBuilderOptions; - public OpenTelemetryTracerImpl(RandomHandler randomHandler, TraceConfig traceConfig) { + OpenTelemetryTracerImpl(RandomHandler randomHandler, TraceConfig traceConfig) { spanBuilderOptions = new OpenTelemetrySpanBuilderImpl.Options(randomHandler, traceConfig); } diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/SpanConverter.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/SpanConverter.java index fc559d2d063..cf031586e60 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/SpanConverter.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/SpanConverter.java @@ -57,13 +57,22 @@ static SpanContext mapSpanContext(io.opentelemetry.api.trace.SpanContext otelSpa } static io.opentelemetry.api.trace.SpanContext mapSpanContext(SpanContext ocSpanContext) { - return io.opentelemetry.api.trace.SpanContext.create( - ocSpanContext.getTraceId().toLowerBase16(), - ocSpanContext.getSpanId().toLowerBase16(), + return mapSpanContext(ocSpanContext, /* isRemoteParent= */ false); + } + + static io.opentelemetry.api.trace.SpanContext mapSpanContext( + SpanContext ocSpanContext, boolean isRemoteParent) { + String traceId = ocSpanContext.getTraceId().toLowerBase16(); + String spanId = ocSpanContext.getSpanId().toLowerBase16(); + TraceFlags traceFlags = ocSpanContext.getTraceOptions().isSampled() ? TraceFlags.getSampled() - : TraceFlags.getDefault(), - mapTracestate(ocSpanContext.getTracestate())); + : TraceFlags.getDefault(); + TraceState traceState = mapTracestate(ocSpanContext.getTracestate()); + return isRemoteParent + ? io.opentelemetry.api.trace.SpanContext.createFromRemoteParent( + traceId, spanId, traceFlags, traceState) + : io.opentelemetry.api.trace.SpanContext.create(traceId, spanId, traceFlags, traceState); } private static TraceState mapTracestate(Tracestate tracestate) { diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/ThreadLocalRandomHandler.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/ThreadLocalRandomHandler.java index 76b1567d2bf..942a06c8856 100644 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/ThreadLocalRandomHandler.java +++ b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/ThreadLocalRandomHandler.java @@ -15,10 +15,10 @@ * implementation in opencensus-impl, however we do not want to depend on opencensus-impl here. 
*/ @ThreadSafe -public final class ThreadLocalRandomHandler extends RandomHandler { +final class ThreadLocalRandomHandler extends RandomHandler { /** Constructs a new {@code ThreadLocalRandomHandler}. */ - public ThreadLocalRandomHandler() {} + ThreadLocalRandomHandler() {} @Override public Random current() { diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MultiMetricProducer.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MultiMetricProducer.java deleted file mode 100644 index a2494faf783..00000000000 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/MultiMetricProducer.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.opencensusshim.metrics; - -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -/** Class that wraps multiple metric producers into one. */ -final class MultiMetricProducer implements MetricProducer { - private final Collection producers; - - public MultiMetricProducer(Collection producers) { - this.producers = producers; - } - - @Override - public Collection collectAllMetrics() { - List result = new ArrayList<>(); - for (MetricProducer p : producers) { - result.addAll(p.collectAllMetrics()); - } - return result; - } -} diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusAttachingMetricReader.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusAttachingMetricReader.java deleted file mode 100644 index 46e0f9534c2..00000000000 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusAttachingMetricReader.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.opencensusshim.metrics; - -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.metrics.InstrumentType; -import io.opentelemetry.sdk.metrics.data.AggregationTemporality; -import io.opentelemetry.sdk.metrics.export.CollectionRegistration; -import io.opentelemetry.sdk.metrics.export.MetricReader; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; -import io.opentelemetry.sdk.resources.Resource; -import java.util.Arrays; - -/** {@link MetricReader} that appends OpenCensus metrics to anything read. */ -final class OpenCensusAttachingMetricReader implements MetricReader { - private final MetricReader adapted; - - OpenCensusAttachingMetricReader(MetricReader adapted) { - this.adapted = adapted; - } - - @Override - public void register(CollectionRegistration registration) { - // TODO: Find a way to pull the resource off of the SDK. 
- adapted.register( - new MultiMetricProducer( - Arrays.asList( - MetricProducer.asMetricProducer(registration), - OpenCensusMetricProducer.create(Resource.getDefault())))); - } - - @Override - public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { - return adapted.getAggregationTemporality(instrumentType); - } - - @Override - public CompletableResultCode forceFlush() { - return adapted.forceFlush(); - } - - @Override - public CompletableResultCode shutdown() { - return adapted.shutdown(); - } -} diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java deleted file mode 100644 index 77af7ecf5ee..00000000000 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducer.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.opencensusshim.metrics; - -import io.opencensus.metrics.Metrics; -import io.opencensus.metrics.export.MetricProducerManager; -import io.opentelemetry.opencensusshim.internal.metrics.MetricAdapter; -import io.opentelemetry.sdk.metrics.SdkMeterProvider; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.export.MetricReader; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; -import io.opentelemetry.sdk.resources.Resource; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -/** - * A producer instance of OpenCensus metrics. - * - *
<p>
The idea here is we can register a merged {@link MetricProducer} combining this with the - * {@link SdkMeterProvider} producer with a {@link MetricReader}, allowing the reader to pull - * metrics from both OpenTelemetry and OpenCensus backends. - */ -final class OpenCensusMetricProducer implements MetricProducer { - private final Resource resource; - private final MetricProducerManager openCensusMetricStorage; - - OpenCensusMetricProducer(Resource resource, MetricProducerManager openCensusMetricStorage) { - this.resource = resource; - this.openCensusMetricStorage = openCensusMetricStorage; - } - - /** - * Constructs a new {@link OpenCensusMetricProducer} that reports against the given {@link - * Resource}. - */ - static MetricProducer create(Resource resource) { - return new OpenCensusMetricProducer( - resource, Metrics.getExportComponent().getMetricProducerManager()); - } - - @Override - public Collection collectAllMetrics() { - List result = new ArrayList<>(); - openCensusMetricStorage - .getAllMetricProducer() - .forEach( - producer -> { - producer - .getMetrics() - .forEach(metric -> result.add(MetricAdapter.convert(resource, metric))); - }); - return result; - } -} diff --git a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetrics.java b/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetrics.java deleted file mode 100644 index fc4d5a2b22c..00000000000 --- a/opencensus-shim/src/main/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetrics.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.opencensusshim.metrics; - -import io.opentelemetry.sdk.metrics.export.MetricReader; - -/** Convenience methods for adapting OpenCensus metrics into OpenTelemetry. */ -public final class OpenCensusMetrics { - private OpenCensusMetrics() {} - - /** - * Attaches OpenCensus metrics to metrics read by the given input. - * - * @param input A {@link MetricReader} that will receive OpenCensus metrics. - * @return The adapted MetricReaderFactory. 
- */ - public static MetricReader attachTo(MetricReader input) { - return new OpenCensusAttachingMetricReader(input); - } -} diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/DelegatingSpanTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/DelegatingSpanTest.java index 88b63c019bb..2843ed89061 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/DelegatingSpanTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/DelegatingSpanTest.java @@ -8,12 +8,19 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.times; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.StatusCode; +import io.opentelemetry.context.Context; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; +import java.time.Instant; import java.util.Arrays; import java.util.List; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.jupiter.api.Test; @@ -80,64 +87,33 @@ static List allInterfaceMethods(Class clazz) { static Stream delegateMethodsProvider() { return Stream.of( Arguments.of("end", new Class[] {}, times(1)), - Arguments.of( - "end", new Class[] {long.class, java.util.concurrent.TimeUnit.class}, times(1)), - Arguments.of("end", new Class[] {java.time.Instant.class}, times(1)), + Arguments.of("end", new Class[] {long.class, TimeUnit.class}, times(1)), + Arguments.of("end", new Class[] {Instant.class}, times(1)), Arguments.of("setAttribute", new Class[] {String.class, String.class}, times(1)), - Arguments.of( - "setAttribute", - new Class[] {io.opentelemetry.api.common.AttributeKey.class, int.class}, - times(1)), - Arguments.of( - "setAttribute", - new Class[] {io.opentelemetry.api.common.AttributeKey.class, Object.class}, - times(1)), + Arguments.of("setAttribute", new Class[] {AttributeKey.class, int.class}, times(1)), + Arguments.of("setAttribute", new Class[] {AttributeKey.class, Object.class}, times(1)), Arguments.of("setAttribute", new Class[] {String.class, long.class}, times(1)), Arguments.of("setAttribute", new Class[] {String.class, double.class}, times(1)), Arguments.of("setAttribute", new Class[] {String.class, boolean.class}, times(1)), Arguments.of( - "recordException", - new Class[] {Throwable.class, io.opentelemetry.api.common.Attributes.class}, - times(1)), + "recordException", new Class[] {Throwable.class, Attributes.class}, times(1)), Arguments.of("recordException", new Class[] {Throwable.class}, times(1)), - Arguments.of( - "setAllAttributes", - new Class[] {io.opentelemetry.api.common.Attributes.class}, - times(1)), + Arguments.of("setAllAttributes", new Class[] {Attributes.class}, times(1)), Arguments.of("updateName", new Class[] {String.class}, times(1)), + Arguments.of("storeInContext", new Class[] {Context.class}, times(1)), + Arguments.of("addEvent", new Class[] {String.class, Instant.class}, times(1)), Arguments.of( - "storeInContext", new Class[] {io.opentelemetry.context.Context.class}, times(1)), - Arguments.of("addEvent", new Class[] {String.class, java.time.Instant.class}, times(1)), + "addEvent", new Class[] {String.class, long.class, TimeUnit.class}, times(1)), Arguments.of( - "addEvent", - new Class[] {String.class, long.class, 
java.util.concurrent.TimeUnit.class}, - times(1)), - Arguments.of( - "addEvent", - new Class[] { - String.class, io.opentelemetry.api.common.Attributes.class, java.time.Instant.class - }, - times(1)), + "addEvent", new Class[] {String.class, Attributes.class, Instant.class}, times(1)), Arguments.of("addEvent", new Class[] {String.class}, times(1)), Arguments.of( "addEvent", - new Class[] { - String.class, - io.opentelemetry.api.common.Attributes.class, - long.class, - java.util.concurrent.TimeUnit.class - }, + new Class[] {String.class, Attributes.class, long.class, TimeUnit.class}, times(1)), - Arguments.of( - "addEvent", - new Class[] {String.class, io.opentelemetry.api.common.Attributes.class}, - times(1)), - Arguments.of( - "setStatus", - new Class[] {io.opentelemetry.api.trace.StatusCode.class, String.class}, - times(1)), - Arguments.of( - "setStatus", new Class[] {io.opentelemetry.api.trace.StatusCode.class}, times(1)), + Arguments.of("addEvent", new Class[] {String.class, Attributes.class}, times(1)), + Arguments.of("setStatus", new Class[] {StatusCode.class, String.class}, times(1)), + Arguments.of("setStatus", new Class[] {StatusCode.class}, times(1)), // // special cases // @@ -145,7 +121,10 @@ static Stream delegateMethodsProvider() { // `true` Arguments.of("isRecording", new Class[] {}, times(0)), // called twice: once in constructor, then once during delegation - Arguments.of("getSpanContext", new Class[] {}, times(2))); + Arguments.of("getSpanContext", new Class[] {}, times(2)), + // addLink is never called + Arguments.of("addLink", new Class[] {SpanContext.class}, times(0)), + Arguments.of("addLink", new Class[] {SpanContext.class, Attributes.class}, times(0))); } // gets default values for all cases, as mockito can't mock wrappers or primitives, including diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java index 33c21b00c30..4d2fefc7242 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/InteroperabilityTest.java @@ -44,6 +44,7 @@ import io.opentelemetry.sdk.trace.data.StatusData; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; +import io.opentelemetry.sdk.trace.samplers.Sampler; import java.util.Collection; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.BeforeEach; @@ -71,7 +72,11 @@ class InteroperabilityTest { SpanProcessor spanProcessor = SimpleSpanProcessor.create(spanExporter); openTelemetry = OpenTelemetrySdk.builder() - .setTracerProvider(SdkTracerProvider.builder().addSpanProcessor(spanProcessor).build()) + .setTracerProvider( + SdkTracerProvider.builder() + .setSampler(Sampler.alwaysOn()) + .addSpanProcessor(spanProcessor) + .build()) .buildAndRegisterGlobal(); } @@ -84,7 +89,7 @@ void resetMocks() { } @Test - void testParentChildRelationshipsAreExportedCorrectly() { + void parentChildRelationshipsAreExportedCorrectly() { Tracer tracer = openTelemetry.getTracer("io.opentelemetry.test.scoped.span.1"); Span span = tracer.spanBuilder("OpenTelemetrySpan").startSpan(); try (Scope scope = Context.current().with(span).makeCurrent()) { @@ -128,7 +133,7 @@ void testParentChildRelationshipsAreExportedCorrectly() { } @Test - void testRemoteParent() { + void remoteParent() { io.opencensus.trace.Tracer tracer = Tracing.getTracer(); 
io.opencensus.trace.Span remoteParentSpan = tracer.spanBuilder("remote parent span").startSpan(); @@ -148,6 +153,15 @@ void testRemoteParent() { assertThat(export1.size()).isEqualTo(1); SpanData spanData1 = export1.iterator().next(); + + // Remote parent should be set to parent span context + assertThat(spanData1.getParentSpanContext().isRemote()).isTrue(); + assertThat(spanData1.getParentSpanContext().getTraceId()) + .isEqualTo(remoteParentSpan.getContext().getTraceId().toLowerBase16()); + assertThat(spanData1.getParentSpanContext().getSpanId()) + .isEqualTo(remoteParentSpan.getContext().getSpanId().toLowerBase16()); + + // Remote parent should be added as link assertThat(spanData1.getName()).isEqualTo("OpenCensusSpan"); assertThat(spanData1.getLinks().get(0).getSpanContext().getSpanId()) .isEqualTo(remoteParentSpan.getContext().getSpanId().toLowerBase16()); @@ -155,7 +169,7 @@ void testRemoteParent() { @Test @SuppressLogger(OpenTelemetrySpanImpl.class) - void testParentChildRelationshipsAreExportedCorrectlyForOpenCensusOnly() { + void parentChildRelationshipsAreExportedCorrectlyForOpenCensusOnly() { io.opencensus.trace.Tracer tracer = Tracing.getTracer(); io.opencensus.trace.Span parentLinkSpan = tracer.spanBuilder("parent link span").startSpan(); try (io.opencensus.common.Scope scope = @@ -261,7 +275,7 @@ void testParentChildRelationshipsAreExportedCorrectlyForOpenCensusOnly() { } @Test - void testOpenTelemetryMethodsOnOpenCensusSpans() { + void openTelemetryMethodsOnOpenCensusSpans() { io.opencensus.trace.Tracer tracer = Tracing.getTracer(); try (io.opencensus.common.Scope scope = tracer @@ -308,7 +322,7 @@ void testOpenTelemetryMethodsOnOpenCensusSpans() { } @Test - public void testNoSampleDoesNotExport() { + public void noSampleDoesNotExport() { io.opencensus.trace.Tracer tracer = Tracing.getTracer(); try (io.opencensus.common.Scope scope = tracer.spanBuilder("OpenCensusSpan").setSampler(Samplers.neverSample()).startScopedSpan()) { @@ -324,14 +338,14 @@ public void testNoSampleDoesNotExport() { } @Test - public void testOpenCensusSamplerIsAlwaysOn() { + public void openCensusSamplerIsAlwaysOn() { // OpenTelemetryTraceComponentImpl provides this behavior assertThat(Tracing.getTraceConfig().getActiveTraceParams().getSampler()) .isEqualTo(Samplers.alwaysSample()); } @Test - public void testByDefaultDoesExport() { + public void byDefaultDoesExport() { io.opencensus.trace.Tracer tracer = Tracing.getTracer(); try (io.opencensus.common.Scope scope = tracer.spanBuilder("OpenCensusSpan").setRecordEvents(false).startScopedSpan()) { diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/MetricAdapterTest.java similarity index 99% rename from opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java rename to opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/MetricAdapterTest.java index 9d1a582aa44..7022477dce6 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/MetricAdapterTest.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.opencensusshim.internal.metrics; +package io.opentelemetry.opencensusshim; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static 
io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenCensusMetricProducerTest.java similarity index 94% rename from opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java rename to opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenCensusMetricProducerTest.java index 1e9f756ca47..67996be5e06 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenCensusMetricProducerTest.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.opencensusshim.metrics; +package io.opentelemetry.opencensusshim; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; @@ -21,7 +21,7 @@ import io.opencensus.trace.TraceOptions; import io.opencensus.trace.Tracestate; import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; +import io.opentelemetry.sdk.metrics.export.MetricProducer; import io.opentelemetry.sdk.resources.Resource; import java.time.Duration; import java.util.Arrays; @@ -31,8 +31,7 @@ import org.junit.jupiter.api.Test; class OpenCensusMetricProducerTest { - private final MetricProducer openCensusMetrics = - OpenCensusMetricProducer.create(Resource.empty()); + private final MetricProducer openCensusMetrics = OpenCensusMetricProducer.create(); private static final Measure.MeasureLong LATENCY_MS = Measure.MeasureLong.create("task_latency", "The task latency in milliseconds", "ms"); @@ -69,7 +68,7 @@ void extractHistogram() throws InterruptedException { .atMost(Duration.ofSeconds(10)) .untilAsserted( () -> - assertThat(openCensusMetrics.collectAllMetrics()) + assertThat(openCensusMetrics.produce(Resource.empty())) .satisfiesExactly( metric -> assertThat(metric) diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenCensusMetricsTest.java similarity index 88% rename from opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java rename to opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenCensusMetricsTest.java index 661054ecc1b..4539045c435 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenCensusMetricsTest.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.opencensusshim.metrics; +package io.opentelemetry.opencensusshim; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; @@ -26,7 +26,10 @@ class OpenCensusMetricsTest { void capturesOpenCensusAndOtelMetrics() throws InterruptedException { InMemoryMetricReader reader = InMemoryMetricReader.create(); SdkMeterProvider otelMetrics = - SdkMeterProvider.builder().registerMetricReader(OpenCensusMetrics.attachTo(reader)).build(); + SdkMeterProvider.builder() + .registerMetricReader(reader) + .registerMetricProducer(OpenCensusMetricProducer.create()) + .build(); // Record an otel metric. 
otelMetrics.meterBuilder("otel").build().counterBuilder("otel.sum").build().add(1); // Record an OpenCensus metric. @@ -47,7 +50,7 @@ void capturesOpenCensusAndOtelMetrics() throws InterruptedException { .untilAsserted( () -> assertThat(reader.collectAllMetrics()) - .satisfiesExactly( + .satisfiesExactlyInAnyOrder( metric -> assertThat(metric).hasName("otel.sum").hasLongSumSatisfying(sum -> {}), metric -> diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java deleted file mode 100644 index c0bce937a18..00000000000 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.opencensusshim; - -import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; -import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; -import static java.util.stream.Collectors.groupingBy; - -import com.google.common.collect.ImmutableList; -import io.opencensus.common.Duration; -import io.opencensus.stats.Aggregation; -import io.opencensus.stats.Measure; -import io.opencensus.stats.Stats; -import io.opencensus.stats.StatsRecorder; -import io.opencensus.stats.View; -import io.opencensus.stats.ViewManager; -import io.opencensus.tags.TagContext; -import io.opencensus.tags.TagKey; -import io.opencensus.tags.TagMetadata; -import io.opencensus.tags.TagValue; -import io.opencensus.tags.Tagger; -import io.opencensus.tags.Tags; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; -import java.util.HashSet; -import java.util.Set; -import java.util.stream.Collectors; -import org.awaitility.Awaitility; -import org.junit.jupiter.api.Test; - -class OpenTelemetryMetricExporterTest { - - @Test - @SuppressWarnings({"deprecation"}) // Summary is deprecated in census - void testSupportedMetricsExportedCorrectly() { - Tagger tagger = Tags.getTagger(); - Measure.MeasureLong latency = - Measure.MeasureLong.create("task_latency", "The task latency in milliseconds", "ms"); - Measure.MeasureDouble latency2 = - Measure.MeasureDouble.create("task_latency_2", "The task latency in milliseconds 2", "ms"); - StatsRecorder statsRecorder = Stats.getStatsRecorder(); - TagKey tagKey = TagKey.create("tagKey"); - TagValue tagValue = TagValue.create("tagValue"); - View longSumView = - View.create( - View.Name.create("long_sum"), - "long sum", - latency, - Aggregation.Sum.create(), - ImmutableList.of(tagKey)); - View longGaugeView = - View.create( - View.Name.create("long_gauge"), - "long gauge", - latency, - Aggregation.LastValue.create(), - ImmutableList.of(tagKey)); - View doubleSumView = - View.create( - View.Name.create("double_sum"), - "double sum", - latency2, - Aggregation.Sum.create(), - ImmutableList.of()); - View doubleGaugeView = - View.create( - View.Name.create("double_gauge"), - "double gauge", - latency2, - Aggregation.LastValue.create(), - ImmutableList.of()); - ViewManager viewManager = Stats.getViewManager(); - viewManager.registerView(longSumView); - viewManager.registerView(longGaugeView); - viewManager.registerView(doubleSumView); - viewManager.registerView(doubleGaugeView); - // Create OpenCensus -> 
OpenTelemetry Exporter bridge - InMemoryMetricExporter exporter = InMemoryMetricExporter.create(); - OpenTelemetryMetricsExporter otelExporter = - OpenTelemetryMetricsExporter.createAndRegister(exporter, Duration.create(1, 0)); - try { - TagContext tagContext = - tagger - .emptyBuilder() - .put(tagKey, tagValue, TagMetadata.create(TagMetadata.TagTtl.UNLIMITED_PROPAGATION)) - .build(); - try (io.opencensus.common.Scope ss = tagger.withTagContext(tagContext)) { - statsRecorder.newMeasureMap().put(latency, 50).record(); - statsRecorder.newMeasureMap().put(latency2, 60).record(); - } - Set allowedMetrics = new HashSet<>(); - allowedMetrics.add("double_gauge"); - allowedMetrics.add("double_sum"); - allowedMetrics.add("long_gauge"); - allowedMetrics.add("long_sum"); - // Slow down for OpenCensus to catch up. - Awaitility.await() - .atMost(java.time.Duration.ofSeconds(10)) - .untilAsserted( - () -> - assertThat( - // Filter for metrics with name in allowedMetrics, and dedupe to only one - // metric per unique metric name - exporter.getFinishedMetricItems().stream() - .filter(metric -> allowedMetrics.contains(metric.getName())) - .collect(groupingBy(MetricData::getName)) - .values() - .stream() - .map(metricData -> metricData.get(0)) - .collect(Collectors.toList())) - .satisfiesExactlyInAnyOrder( - metric -> - assertThat(metric) - .hasName("double_gauge") - .hasDescription("double gauge") - .hasUnit("ms") - .hasDoubleGaugeSatisfying( - gauge -> - gauge.hasPointsSatisfying( - point -> - point - .hasValue(60) - .hasAttributes(Attributes.empty()))), - metric -> - assertThat(metric) - .hasName("double_sum") - .hasDescription("double sum") - .hasUnit("ms") - .hasDoubleSumSatisfying( - sum -> - sum.hasPointsSatisfying( - point -> - point - .hasValue(60) - .hasAttributes(Attributes.empty()))), - metric -> - assertThat(metric) - .hasName("long_gauge") - .hasDescription("long gauge") - .hasUnit("ms") - .hasLongGaugeSatisfying( - gauge -> - gauge.hasPointsSatisfying( - point -> - point - .hasValue(50) - .hasAttributes( - attributeEntry( - tagKey.getName(), - tagValue.asString())))), - metric -> - assertThat(metric) - .hasName("long_sum") - .hasDescription("long sum") - .hasUnit("ms") - .hasLongSumSatisfying( - sum -> - sum.hasPointsSatisfying( - point -> - point - .hasValue(50) - .hasAttributes( - attributeEntry( - tagKey.getName(), - tagValue.asString())))))); - } finally { - otelExporter.stop(); - } - } -} diff --git a/opentracing-shim/src/test/java/io/opentelemetry/opentracingshim/SpanShimTest.java b/opentracing-shim/src/test/java/io/opentelemetry/opentracingshim/SpanShimTest.java index 4014739e2f2..f681efe3938 100644 --- a/opentracing-shim/src/test/java/io/opentelemetry/opentracingshim/SpanShimTest.java +++ b/opentracing-shim/src/test/java/io/opentelemetry/opentracingshim/SpanShimTest.java @@ -135,7 +135,7 @@ void baggage_multipleThreads() throws Exception { IntStream.range(0, baggageItemsCount) .forEach(i -> executor.execute(() -> spanShim.setBaggageItem("key-" + i, "value-" + i))); executor.shutdown(); - executor.awaitTermination(5, TimeUnit.SECONDS); + executor.awaitTermination(10, TimeUnit.SECONDS); for (int i = 0; i < baggageItemsCount; i++) { assertThat(spanShim.getBaggageItem("key-" + i)).isEqualTo("value-" + i); diff --git a/opentracing-shim/src/test/java/io/opentelemetry/opentracingshim/TracerShimTest.java b/opentracing-shim/src/test/java/io/opentelemetry/opentracingshim/TracerShimTest.java index d15d7d98d6b..8a961288c23 100644 --- 
a/opentracing-shim/src/test/java/io/opentelemetry/opentracingshim/TracerShimTest.java +++ b/opentracing-shim/src/test/java/io/opentelemetry/opentracingshim/TracerShimTest.java @@ -45,7 +45,8 @@ class TracerShimTest { static final io.opentelemetry.api.baggage.Baggage EMPTY_BAGGAGE = io.opentelemetry.api.baggage.Baggage.empty(); - @RegisterExtension static OpenTelemetryExtension otelTesting = OpenTelemetryExtension.create(); + @RegisterExtension + static final OpenTelemetryExtension otelTesting = OpenTelemetryExtension.create(); TracerShim tracerShim; TracerProvider provider; diff --git a/perf-harness/src/test/java/io/opentelemetry/perf/OtlpPipelineStressTest.java b/perf-harness/src/test/java/io/opentelemetry/perf/OtlpPipelineStressTest.java index 2e175fd4873..5ab5db7214c 100644 --- a/perf-harness/src/test/java/io/opentelemetry/perf/OtlpPipelineStressTest.java +++ b/perf-harness/src/test/java/io/opentelemetry/perf/OtlpPipelineStressTest.java @@ -61,13 +61,13 @@ public class OtlpPipelineStressTest { public static final int OTLP_RECEIVER_PORT = 4317; public static final int COLLECTOR_PROXY_PORT = 44444; public static final int TOXIPROXY_CONTROL_PORT = 8474; - public static Network network = Network.newNetwork(); - public static AtomicLong totalSpansReceivedByCollector = new AtomicLong(); + public static final Network network = Network.newNetwork(); + public static final AtomicLong totalSpansReceivedByCollector = new AtomicLong(); private static final Logger logger = LoggerFactory.getLogger(OtlpPipelineStressTest.class); @Container - public static GenericContainer collectorContainer = + public static final GenericContainer collectorContainer = new GenericContainer<>( DockerImageName.parse("ghcr.io/open-telemetry/opentelemetry-java/otel-collector")) .withImagePullPolicy(PullPolicy.alwaysPull()) @@ -93,7 +93,7 @@ public class OtlpPipelineStressTest { .waitingFor(new LogMessageWaitStrategy().withRegEx(".*Everything is ready.*")); @Container - public static GenericContainer toxiproxyContainer = + public static final GenericContainer toxiproxyContainer = new GenericContainer<>( DockerImageName.parse("ghcr.io/open-telemetry/opentelemetry-java/toxiproxy")) .withImagePullPolicy(PullPolicy.alwaysPull()) @@ -259,7 +259,7 @@ private void setupSdk() { // set up the span exporter and wire it into the SDK OtlpGrpcSpanExporter spanExporter = OtlpGrpcSpanExporter.builder() - .setMeterProvider(meterProvider) + .setMeterProvider(() -> meterProvider) .setEndpoint( "http://" + toxiproxyContainer.getHost() diff --git a/perf-harness/src/test/resources/otel-collector-config-perf.yaml b/perf-harness/src/test/resources/otel-collector-config-perf.yaml index 0aa39a4d998..8df61145341 100644 --- a/perf-harness/src/test/resources/otel-collector-config-perf.yaml +++ b/perf-harness/src/test/resources/otel-collector-config-perf.yaml @@ -4,8 +4,8 @@ receivers: grpc: exporters: - logging: - loglevel: info + debug: + verbosity: normal sampling_initial: 1 sampling_thereafter: 1 @@ -25,8 +25,8 @@ service: traces: receivers: [otlp] processors: [batch] - exporters: [logging] + exporters: [debug] metrics: receivers: [otlp] processors: [batch] - exporters: [logging] + exporters: [debug] diff --git a/sdk-extensions/autoconfigure-spi/build.gradle.kts b/sdk-extensions/autoconfigure-spi/build.gradle.kts index 53b10f0a104..32985cdf4e9 100644 --- a/sdk-extensions/autoconfigure-spi/build.gradle.kts +++ b/sdk-extensions/autoconfigure-spi/build.gradle.kts @@ -8,4 +8,5 @@ otelJava.moduleName.set("io.opentelemetry.sdk.autoconfigure.spi") 
dependencies { api(project(":sdk:all")) + compileOnly(project(":api:incubator")) } diff --git a/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/AutoConfigurationCustomizer.java b/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/AutoConfigurationCustomizer.java index e016e6a7424..7ecfa9c8dd1 100644 --- a/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/AutoConfigurationCustomizer.java +++ b/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/AutoConfigurationCustomizer.java @@ -6,12 +6,15 @@ package io.opentelemetry.sdk.autoconfigure.spi; import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.sdk.logs.LogRecordProcessor; import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder; import io.opentelemetry.sdk.logs.export.LogRecordExporter; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.opentelemetry.sdk.metrics.export.MetricReader; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder; +import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; import io.opentelemetry.sdk.trace.samplers.Sampler; import java.util.Map; @@ -62,6 +65,24 @@ AutoConfigurationCustomizer addSpanExporterCustomizer( BiFunction exporterCustomizer); + /** + * Adds a {@link BiFunction} to invoke for all autoconfigured {@link + * io.opentelemetry.sdk.trace.SpanProcessor}. The return value of the {@link BiFunction} will + * replace the passed-in argument. In contrast to {@link #addSpanExporterCustomizer(BiFunction)} + * this allows modifications to happen before batching occurs. As a result, it is possible to + * efficiently filter spans, add artificial spans or delay spans for enhancing them with external, + * delayed data. + * + *

Multiple calls will execute the customizers in order. + * + * @since 1.33.0 + */ + default AutoConfigurationCustomizer addSpanProcessorCustomizer( + BiFunction + spanProcessorCustomizer) { + return this; + } + /** * Adds a {@link Supplier} of a map of property names and values to use as defaults for the {@link * ConfigProperties} used during auto-configuration. The order of precedence of properties is @@ -130,12 +151,27 @@ default AutoConfigurationCustomizer addMeterProviderCustomizer( * *

Multiple calls will execute the customizers in order. */ + @SuppressWarnings("UnusedReturnValue") default AutoConfigurationCustomizer addMetricExporterCustomizer( BiFunction exporterCustomizer) { return this; } + /** + * Adds a {@link BiFunction} to invoke with the autoconfigured {@link MetricReader} to allow + * customization. The return value of the {@link BiFunction} will replace the passed-in argument. + * + *

Multiple calls will execute the customizers in order. + * + * @since 1.36.0 + */ + @SuppressWarnings("UnusedReturnValue") + default AutoConfigurationCustomizer addMetricReaderCustomizer( + BiFunction readerCustomizer) { + return this; + } + /** * Adds a {@link BiFunction} to invoke the with the {@link SdkLoggerProviderBuilder} to allow * customization. The return value of the {@link BiFunction} will replace the passed-in argument. @@ -164,4 +200,22 @@ default AutoConfigurationCustomizer addLogRecordExporterCustomizer( exporterCustomizer) { return this; } + + /** + * Adds a {@link BiFunction} to invoke for all autoconfigured {@link + * io.opentelemetry.sdk.logs.LogRecordProcessor}s. The return value of the {@link BiFunction} will + * replace the passed-in argument. In contrast to {@link + * #addLogRecordExporterCustomizer(BiFunction)} (BiFunction)} this allows modifications to happen + * before batching occurs. As a result, it is possible to efficiently filter logs, add artificial + * logs or delay logs for enhancing them with external, delayed data. + * + *

Multiple calls will execute the customizers in order. + * + * @since 1.33.0 + */ + default AutoConfigurationCustomizer addLogRecordProcessorCustomizer( + BiFunction + logRecordProcessorCustomizer) { + return this; + } } diff --git a/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/AutoConfigureListener.java b/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/AutoConfigureListener.java new file mode 100644 index 00000000000..e181447adb5 --- /dev/null +++ b/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/AutoConfigureListener.java @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.autoconfigure.spi.internal; + +import io.opentelemetry.sdk.OpenTelemetrySdk; + +/** + * Interface to be extended by SPIs that require access to the autoconfigured {@link + * OpenTelemetrySdk} instance. + * + *

This is not a standalone SPI. Instead, implementations of other SPIs can also implement this + * interface to receive a callback with the configured SDK. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public interface AutoConfigureListener { + + void afterAutoConfigure(OpenTelemetrySdk sdk); +} diff --git a/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/ComponentProvider.java b/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/ComponentProvider.java new file mode 100644 index 00000000000..5772a0a1797 --- /dev/null +++ b/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/ComponentProvider.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.autoconfigure.spi.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.sdk.logs.LogRecordProcessor; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.SpanProcessor; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import io.opentelemetry.sdk.trace.samplers.Sampler; + +/** + * Provides configured instances of SDK extension components. {@link ComponentProvider} allows SDK + * extension components which are not part of the core SDK to be referenced in declarative based + * configuration. + * + *

NOTE: when {@link #getType()} is {@link Resource}, the {@link #getName()} is not (currently) + * used, and {@link #create(DeclarativeConfigProperties)} is (currently) called with an empty {@link + * DeclarativeConfigProperties}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + * @param the type of the SDK extension component. See {@link #getType()}. Supported values + * include: {@link SpanExporter}, {@link MetricExporter}, {@link LogRecordExporter}, {@link + * SpanProcessor}, {@link LogRecordProcessor}, {@link TextMapPropagator}, {@link Sampler}, + * {@link Resource}. + */ +public interface ComponentProvider { + + /** + * The type of SDK extension component. For example, if providing instances of a custom span + * exporter, the type would be {@link SpanExporter}. + */ + Class getType(); + + /** + * The name of the exporter, to be referenced in configuration files. For example, if providing + * instances of a custom span exporter for the "acme" protocol, the name might be "acme". + * + *

This name MUST not be the same as any other component provider name which returns components + * of the same {@link #getType() type}. In other words, {@link #getType()} and name form a + * composite key uniquely identifying the provider. + */ + String getName(); + + /** + * Configure an instance of the SDK extension component according to the {@code config}. + * + * @param config the configuration provided where the component is referenced in a configuration + * file. + * @return an instance the SDK extension component + */ + // TODO (jack-berg): consider dynamic configuration use case before stabilizing in case that + // affects any API decisions + T create(DeclarativeConfigProperties config); +} diff --git a/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/DefaultConfigProperties.java b/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/DefaultConfigProperties.java index b8673993fe3..b5496df7c48 100644 --- a/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/DefaultConfigProperties.java +++ b/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/DefaultConfigProperties.java @@ -47,7 +47,8 @@ public final class DefaultConfigProperties implements ConfigProperties { * priority over environment variables. */ public static DefaultConfigProperties create(Map defaultProperties) { - return new DefaultConfigProperties(System.getProperties(), System.getenv(), defaultProperties); + return new DefaultConfigProperties( + ConfigUtil.safeSystemProperties(), System.getenv(), defaultProperties); } /** @@ -159,7 +160,7 @@ public Duration getDuration(String name) { try { long rawNumber = Long.parseLong(numberString.trim()); TimeUnit unit = getDurationUnit(unitString.trim()); - return Duration.ofMillis(TimeUnit.MILLISECONDS.convert(rawNumber, unit)); + return Duration.ofNanos(TimeUnit.NANOSECONDS.convert(rawNumber, unit)); } catch (NumberFormatException ex) { throw new ConfigurationException( "Invalid duration property " @@ -256,6 +257,10 @@ private static List filterBlanksAndNulls(String[] values) { /** Returns the TimeUnit associated with a unit string. Defaults to milliseconds. */ private static TimeUnit getDurationUnit(String unitString) { switch (unitString) { + case "us": + return TimeUnit.MICROSECONDS; + case "ns": + return TimeUnit.NANOSECONDS; case "": // Fallthrough expected case "ms": return TimeUnit.MILLISECONDS; diff --git a/sdk-extensions/autoconfigure-spi/src/test/java/io/opentelemetry/sdk/autoconfigure/spi/internal/ConfigPropertiesTest.java b/sdk-extensions/autoconfigure-spi/src/test/java/io/opentelemetry/sdk/autoconfigure/spi/internal/ConfigPropertiesTest.java index e0fe198dc9e..78cc6806c08 100644 --- a/sdk-extensions/autoconfigure-spi/src/test/java/io/opentelemetry/sdk/autoconfigure/spi/internal/ConfigPropertiesTest.java +++ b/sdk-extensions/autoconfigure-spi/src/test/java/io/opentelemetry/sdk/autoconfigure/spi/internal/ConfigPropertiesTest.java @@ -185,8 +185,24 @@ void invalidDuration() { .hasMessage("Invalid duration property duration=9mm. 
Invalid duration string, found: mm"); } + @Test + void durationNegativeParsing() { + assertThat( + DefaultConfigProperties.createFromMap(Collections.singletonMap("duration", "-41")) + .getDuration("duration")) + .isEqualTo(Duration.ofMillis(-41)); + } + @Test void durationUnitParsing() { + assertThat( + DefaultConfigProperties.createFromMap(Collections.singletonMap("duration", "3ns")) + .getDuration("duration")) + .isEqualTo(Duration.ofNanos(3)); + assertThat( + DefaultConfigProperties.createFromMap(Collections.singletonMap("duration", "2us")) + .getDuration("duration")) + .isEqualTo(Duration.ofNanos(2000)); assertThat( DefaultConfigProperties.createFromMap(Collections.singletonMap("duration", "1")) .getDuration("duration")) diff --git a/sdk-extensions/autoconfigure/README.md b/sdk-extensions/autoconfigure/README.md index 65dd75ac01f..8662e8b2c7f 100644 --- a/sdk-extensions/autoconfigure/README.md +++ b/sdk-extensions/autoconfigure/README.md @@ -3,361 +3,6 @@ This artifact implements environment-based autoconfiguration of the OpenTelemetry SDK. This can be an alternative to programmatic configuration using the normal SDK builders. -All options support being passed as Java system properties, e.g., `-Dotel.traces.exporter=zipkin` or -environment variables, e.g., `OTEL_TRACES_EXPORTER=zipkin`. -## Contents - - - - - -- [General Configuration](#general-configuration) - * [Disabling OpenTelemetrySdk](#disabling-opentelemetrysdk) - * [Exporters](#exporters) - + [OTLP exporter (span, metric, and log exporters)](#otlp-exporter-span-metric-and-log-exporters) - - [OTLP exporter retry](#otlp-exporter-retry) - + [Logging exporter](#logging-exporter) - + [Logging OTLP JSON exporter](#logging-otlp-json-exporter) - * [OpenTelemetry Resource](#opentelemetry-resource) - + [Resource Provider SPI](#resource-provider-spi) - + [Disabling Automatic ResourceProviders](#disabling-automatic-resourceproviders) - * [Attribute limits](#attribute-limits) - * [Propagator](#propagator) -- [Tracer provider](#tracer-provider) - * [Span exporters](#span-exporters) - + [Jaeger exporter](#jaeger-exporter) - + [Zipkin exporter](#zipkin-exporter) - * [Batch span processor](#batch-span-processor) - * [Sampler](#sampler) - * [Span limits](#span-limits) -- [Meter provider](#meter-provider) - * [Exemplars](#exemplars) - * [Periodic Metric Reader](#periodic-metric-reader) - * [Metric exporters](#metric-exporters) - + [Prometheus exporter](#prometheus-exporter) - * [Cardinality limits](#cardinality-limits) -- [Logger provider](#logger-provider) -- [Batch log record processor](#batch-log-record-processor) -- [Customizing the OpenTelemetry SDK](#customizing-the-opentelemetry-sdk) - - - -## General Configuration - -See [Tracer provider](#tracer-provider), [Meter provider](#meter-provider), -and [Logger provider](#logger-provider) for signal specific configuration options. - -The autoconfigure module registers Java shutdown hooks to shut down the SDK when appropriate. Please -note that since this project uses java.util.logging for all of it's logging, some of that logging -may be suppressed during shutdown hooks. This is a bug in the JDK itself, and not something we can -control. If you require logging during shutdown hooks, please consider using `System.out` rather -than a logging framework that might shut itself down in a shutdown hook, thus suppressing your log -messages. See this [JDK bug](https://bugs.openjdk.java.net/browse/JDK-8161253) for more details. 
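
The configuration described in this section is picked up when the SDK is autoconfigured at application startup. As a minimal sketch of that bootstrap, assuming the `opentelemetry-sdk-extension-autoconfigure` artifact is on the classpath (the class name and instrumentation scope name below are illustrative):

```java
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.Tracer;
import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;

public final class AutoConfigureBootstrap {

  public static void main(String[] args) {
    // Reads otel.* system properties / OTEL_* environment variables,
    // e.g. -Dotel.traces.exporter=zipkin or OTEL_TRACES_EXPORTER=zipkin.
    OpenTelemetry openTelemetry =
        AutoConfiguredOpenTelemetrySdk.initialize().getOpenTelemetrySdk();

    Tracer tracer = openTelemetry.getTracer("example-app");
    Span span = tracer.spanBuilder("startup").startSpan();
    span.end();
  }
}
```
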
- -### Disabling OpenTelemetrySdk - -The OpenTelemetry SDK can be disabled entirely. If disabled, `AutoConfiguredOpenTelemetrySdk#getOpenTelemetrySdk()` will return a minimally configured instance (i.e. `OpenTelemetrySdk.builder().build()`). - -| System property | Environment variable | Purpose | -|-------------------|----------------------|----------------------------------------------------------------| -| otel.sdk.disabled | OTEL_SDK_DISABLED | If `true`, disable the OpenTelemetry SDK. Defaults to `false`. | - -### Exporters - -The following configuration properties are common to all exporters: - -| System property | Environment variable | Purpose | -|-----------------------|-----------------------|----------------------------------------------------------------------------------------------------------------------------| -| otel.traces.exporter | OTEL_TRACES_EXPORTER | List of exporters to be used for tracing, separated by commas. Default is `otlp`. `none` means no autoconfigured exporter. | -| otel.metrics.exporter | OTEL_METRICS_EXPORTER | List of exporters to be used for metrics, separated by commas. Default is `otlp`. `none` means no autoconfigured exporter. | -| otel.logs.exporter | OTEL_LOGS_EXPORTER | List of exporters to be used for logging, separated by commas. Default is `otlp`. `none` means no autoconfigured exporter. | - -#### OTLP exporter (span, metric, and log exporters) - -The [OpenTelemetry Protocol (OTLP)](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/otlp.md) span, metric, and log exporters - -| System property | Environment variable | Description | -|----------------------------------------------------------|----------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| otel.traces.exporter=otlp (default) | OTEL_TRACES_EXPORTER=otlp | Select the OpenTelemetry exporter for tracing (default) | -| otel.metrics.exporter=otlp (default) | OTEL_METRICS_EXPORTER=otlp | Select the OpenTelemetry exporter for metrics (default) | -| otel.logs.exporter=otlp (default) | OTEL_LOGS_EXPORTER=otlp | Select the OpenTelemetry exporter for logs (default) | -| otel.exporter.otlp.endpoint | OTEL_EXPORTER_OTLP_ENDPOINT | The OTLP traces, metrics, and logs endpoint to connect to. Must be a URL with a scheme of either `http` or `https` based on the use of TLS. If protocol is `http/protobuf` the version and signal will be appended to the path (e.g. `v1/traces`, `v1/metrics`, or `v1/logs`). Default is `http://localhost:4317` when protocol is `grpc`, and `http://localhost:4318/v1/{signal}` when protocol is `http/protobuf`. | -| otel.exporter.otlp.traces.endpoint | OTEL_EXPORTER_OTLP_TRACES_ENDPOINT | The OTLP traces endpoint to connect to. Must be a URL with a scheme of either `http` or `https` based on the use of TLS. Default is `http://localhost:4317` when protocol is `grpc`, and `http://localhost:4318/v1/traces` when protocol is `http/protobuf`. | -| otel.exporter.otlp.metrics.endpoint | OTEL_EXPORTER_OTLP_METRICS_ENDPOINT | The OTLP metrics endpoint to connect to. 
Must be a URL with a scheme of either `http` or `https` based on the use of TLS. Default is `http://localhost:4317` when protocol is `grpc`, and `http://localhost:4318/v1/metrics` when protocol is `http/protobuf`. | -| otel.exporter.otlp.logs.endpoint | OTEL_EXPORTER_OTLP_LOGS_ENDPOINT | The OTLP logs endpoint to connect to. Must be a URL with a scheme of either `http` or `https` based on the use of TLS. Default is `http://localhost:4317` when protocol is `grpc`, and `http://localhost:4318/v1/logs` when protocol is `http/protobuf`. | -| otel.exporter.otlp.certificate | OTEL_EXPORTER_OTLP_CERTIFICATE | The path to the file containing trusted certificates to use when verifying an OTLP trace, metric, or log server's TLS credentials. The file should contain one or more X.509 certificates in PEM format. By default the host platform's trusted root certificates are used. | -| otel.exporter.otlp.traces.certificate | OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE | The path to the file containing trusted certificates to use when verifying an OTLP trace server's TLS credentials. The file should contain one or more X.509 certificates in PEM format. By default the host platform's trusted root certificates are used. | -| otel.exporter.otlp.metrics.certificate | OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE | The path to the file containing trusted certificates to use when verifying an OTLP metric server's TLS credentials. The file should contain one or more X.509 certificates in PEM format. By default the host platform's trusted root certificates are used. | -| otel.exporter.otlp.logs.certificate | OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE | The path to the file containing trusted certificates to use when verifying an OTLP log server's TLS credentials. The file should contain one or more X.509 certificates in PEM format. By default the host platform's trusted root certificates are used. | -| otel.exporter.otlp.client.key | OTEL_EXPORTER_OTLP_CLIENT_KEY | The path to the file containing private client key to use when verifying an OTLP trace, metric, or log client's TLS credentials. The file should contain one private key PKCS8 PEM format. By default no client key is used. | -| otel.exporter.otlp.traces.client.key | OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY | The path to the file containing private client key to use when verifying an OTLP trace client's TLS credentials. The file should contain one private key PKCS8 PEM format. By default no client key file is used. | -| otel.exporter.otlp.metrics.client.key | OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY | The path to the file containing private client key to use when verifying an OTLP metric client's TLS credentials. The file should contain one private key PKCS8 PEM format. By default no client key file is used. | -| otel.exporter.otlp.logs.client.key | OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY | The path to the file containing private client key to use when verifying an OTLP log client's TLS credentials. The file should contain one private key PKCS8 PEM format. By default no client key file is used. | -| otel.exporter.otlp.client.certificate | OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE | The path to the file containing trusted certificates to use when verifying an OTLP trace, metric, or log client's TLS credentials. The file should contain one or more X.509 certificates in PEM format. By default no chain file is used. 
| -| otel.exporter.otlp.traces.client.certificate | OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE | The path to the file containing trusted certificates to use when verifying an OTLP trace server's TLS credentials. The file should contain one or more X.509 certificates in PEM format. By default no chain file is used. | -| otel.exporter.otlp.metrics.client.certificate | OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE | The path to the file containing trusted certificates to use when verifying an OTLP metric server's TLS credentials. The file should contain one or more X.509 certificates in PEM format. By default no chain file is used. | -| otel.exporter.otlp.logs.client.certificate | OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE | The path to the file containing trusted certificates to use when verifying an OTLP log server's TLS credentials. The file should contain one or more X.509 certificates in PEM format. By default no chain file is used. | -| otel.exporter.otlp.headers | OTEL_EXPORTER_OTLP_HEADERS | Key-value pairs separated by commas to pass as request headers on OTLP trace, metric, and log requests. | -| otel.exporter.otlp.traces.headers | OTEL_EXPORTER_OTLP_TRACES_HEADERS | Key-value pairs separated by commas to pass as request headers on OTLP trace requests. | -| otel.exporter.otlp.metrics.headers | OTEL_EXPORTER_OTLP_METRICS_HEADERS | Key-value pairs separated by commas to pass as request headers on OTLP metrics requests. | -| otel.exporter.otlp.logs.headers | OTEL_EXPORTER_OTLP_LOGS_HEADERS | Key-value pairs separated by commas to pass as request headers on OTLP logs requests. | -| otel.exporter.otlp.compression | OTEL_EXPORTER_OTLP_COMPRESSION | The compression type to use on OTLP trace, metric, and log requests. Options include `gzip`. By default no compression will be used. | -| otel.exporter.otlp.traces.compression | OTEL_EXPORTER_OTLP_TRACES_COMPRESSION | The compression type to use on OTLP trace requests. Options include `gzip`. By default no compression will be used. | -| otel.exporter.otlp.metrics.compression | OTEL_EXPORTER_OTLP_METRICS_COMPRESSION | The compression type to use on OTLP metric requests. Options include `gzip`. By default no compression will be used. | -| otel.exporter.otlp.logs.compression | OTEL_EXPORTER_OTLP_LOGS_COMPRESSION | The compression type to use on OTLP log requests. Options include `gzip`. By default no compression will be used. | -| otel.exporter.otlp.timeout | OTEL_EXPORTER_OTLP_TIMEOUT | The maximum waiting time, in milliseconds, allowed to send each OTLP trace, metric, and log batch. Default is `10000`. | -| otel.exporter.otlp.traces.timeout | OTEL_EXPORTER_OTLP_TRACES_TIMEOUT | The maximum waiting time, in milliseconds, allowed to send each OTLP trace batch. Default is `10000`. | -| otel.exporter.otlp.metrics.timeout | OTEL_EXPORTER_OTLP_METRICS_TIMEOUT | The maximum waiting time, in milliseconds, allowed to send each OTLP metric batch. Default is `10000`. | -| otel.exporter.otlp.logs.timeout | OTEL_EXPORTER_OTLP_LOGS_TIMEOUT | The maximum waiting time, in milliseconds, allowed to send each OTLP log batch. Default is `10000`. | -| otel.exporter.otlp.protocol | OTEL_EXPORTER_OTLP_PROTOCOL | The transport protocol to use on OTLP trace, metric, and log requests. Options include `grpc` and `http/protobuf`. Default is `grpc`. | -| otel.exporter.otlp.traces.protocol | OTEL_EXPORTER_OTLP_TRACES_PROTOCOL | The transport protocol to use on OTLP trace requests. Options include `grpc` and `http/protobuf`. Default is `grpc`. 
| -| otel.exporter.otlp.metrics.protocol | OTEL_EXPORTER_OTLP_METRICS_PROTOCOL | The transport protocol to use on OTLP metric requests. Options include `grpc` and `http/protobuf`. Default is `grpc`. | -| otel.exporter.otlp.logs.protocol | OTEL_EXPORTER_OTLP_LOGS_PROTOCOL | The transport protocol to use on OTLP log requests. Options include `grpc` and `http/protobuf`. Default is `grpc`. | -| otel.exporter.otlp.metrics.temporality.preference | OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE | The preferred output aggregation temporality. Options include `DELTA`, `LOWMEMORY`, and `CUMULATIVE`. If `CUMULATIVE`, all instruments will have cumulative temporality. If `DELTA`, counter (sync and async) and histograms will be delta, up down counters (sync and async) will be cumulative. If `LOWMEMORY`, sync counter and histograms will be delta, async counter and up down counters (sync and async) will be cumulative. Default is `CUMULATIVE`. | -| otel.exporter.otlp.metrics.default.histogram.aggregation | OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION | The preferred default histogram aggregation. Options include `BASE2_EXPONENTIAL_BUCKET_HISTOGRAM` and `EXPLICIT_BUCKET_HISTOGRAM`. Default is `EXPLICIT_BUCKET_HISTOGRAM`. | -| otel.experimental.exporter.otlp.retry.enabled | OTEL_EXPERIMENTAL_EXPORTER_OTLP_RETRY_ENABLED | If `true`, enable [experimental retry support](#otlp-exporter-retry). Default is `false`. | - -To configure the service name for the OTLP exporter, add the `service.name` key -to the OpenTelemetry Resource ([see below](#opentelemetry-resource)), e.g. `OTEL_RESOURCE_ATTRIBUTES=service.name=myservice`. - -##### OTLP exporter retry - -[OTLP](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/otlp.md#otlpgrpc-response) requires that [transient](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#retry) errors be handled with a retry strategy. When retry is enabled, retryable gRPC status codes will be retried using an exponential backoff with jitter algorithm as described in the [gRPC Retry Design](https://github.com/grpc/proposal/blob/master/A6-client-retries.md#exponential-backoff). - -The policy has the following configuration, which there is currently no way to customize. - -- `maxAttempts`: The maximum number of attempts, including the original request. Defaults to `5`. -- `initialBackoff`: The initial backoff duration. Defaults to `1s` -- `maxBackoff`: The maximum backoff duration. Defaults to `5s`. -- `backoffMultiplier` THe backoff multiplier. Defaults to `1.5`. - -#### Logging exporter - -The logging exporter prints the name of the span along with its attributes to stdout. It's mainly used for testing and debugging. - -| System property | Environment variable | Description | -|-------------------------------|-------------------------------|----------------------------------------------------------------------| -| otel.traces.exporter=logging | OTEL_TRACES_EXPORTER=logging | Select the logging exporter for tracing | -| otel.metrics.exporter=logging | OTEL_METRICS_EXPORTER=logging | Select the logging exporter for metrics | -| otel.logs.exporter=logging | OTEL_LOGS_EXPORTER=logging | Select the logging exporter for logs | - -#### Logging OTLP JSON exporter - -The logging-otlp exporter writes the telemetry data to the JUL logger in OLTP JSON form. It's a more verbose output mainly used for testing and debugging. 
- -| System property | Environment variable | Description | -|------------------------------------|------------------------------------|----------------------------------------------------| -| otel.traces.exporter=logging-otlp | OTEL_TRACES_EXPORTER=logging-otlp | Select the logging OTLP JSON exporter for tracing | -| otel.metrics.exporter=logging-otlp | OTEL_METRICS_EXPORTER=logging-otlp | Select the logging OTLP JSON exporter for metrics | -| otel.logs.exporter=logging-otlp | OTEL_LOGS_EXPORTER=logging-otlp | Select the logging OTLP JSON exporter for logs | - -**NOTE:** While the `OtlpJsonLogging{Signal}Exporters` are stable, specifying their use -via `otel.{signal}.exporter=logging-otlp` is experimental and subject to change or removal. - -### OpenTelemetry Resource - -The [OpenTelemetry Resource](https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/resource/sdk.md) -is a representation of the entity producing telemetry. - -| System property | Environment variable | Description | -|------------------------------------------|------------------------------------------|------------------------------------------------------------------------------------------------------------| -| otel.resource.attributes | OTEL_RESOURCE_ATTRIBUTES | Specify resource attributes in the following format: key1=val1,key2=val2,key3=val3 | -| otel.service.name | OTEL_SERVICE_NAME | Specify logical service name. Takes precedence over `service.name` defined with `otel.resource.attributes` | -| otel.experimental.resource.disabled-keys | OTEL_EXPERIMENTAL_RESOURCE_DISABLED_KEYS | Specify resource attribute keys that are filtered. | - -You almost always want to specify the [`service.name`](https://github.com/open-telemetry/opentelemetry-specification/tree/main/specification/resource/semantic_conventions#service) for your application. -It corresponds to how you describe the application, for example `authservice` could be an application that authenticates requests, and `cats` could be an application that returns information about [cats](https://en.wikipedia.org/wiki/Cat). -You would specify that by setting service name property in one of the following ways: -* directly via `OTEL_SERVICE_NAME=authservice` or `-Dotel.service.name=cats` -* by `service.name` resource attribute like `OTEL_RESOURCE_ATTRIBUTES=service.name=authservice`, or `-Dotel.resource.attributes=service.name=cats,service.namespace=mammals`. - -If not specified, SDK defaults the service name to `unknown_service:java`. - -#### Resource Provider SPI - -The [autoconfigure-spi](https://github.com/open-telemetry/opentelemetry-java/tree/main/sdk-extensions/autoconfigure-spi) -SDK extension provides a ResourceProvider SPI that allows libraries to automatically provide -Resources, which are merged into a single Resource by the autoconfiguration module. 
You can create -your own ResourceProvider, or optionally use an artifact that includes built-in ResourceProviders: - -* [io.opentelemetry.instrumentation:opentelemetry-resources](https://github.com/open-telemetry/opentelemetry-java-instrumentation/tree/main/instrumentation/resources) - includes providers for - a [predefined set of common resources](https://github.com/open-telemetry/opentelemetry-java-instrumentation/tree/main/instrumentation/resources/library/src/main/java/io/opentelemetry/instrumentation/resources) -* [io.opentelemetry.contrib:opentelemetry-aws-resources](https://github.com/open-telemetry/opentelemetry-java-contrib/tree/main/aws-resources) - includes providers - for [common AWS resources](https://github.com/open-telemetry/opentelemetry-java-contrib/tree/main/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource) - -#### Disabling Automatic ResourceProviders - -If you are using the `ResourceProvider` SPI (many instrumentation agent distributions include this automatically), -you can enable / disable one or more of them by using the following configuration items: - -| System property | Environment variable | Description | -|---------------------------------------|---------------------------------------|---------------------------------------------------------------------------------------------| -| otel.java.enabled.resource.providers | OTEL_JAVA_ENABLED_RESOURCE_PROVIDERS | Enables one or more `ResourceProvider` types. If unset, all resource providers are enabled. | -| otel.java.disabled.resource.providers | OTEL_JAVA_DISABLED_RESOURCE_PROVIDERS | Disables one or more `ResourceProvider` types | - -The value for these properties must be a comma separated list of fully qualified `ResourceProvider` classnames. -For example, if you don't want to expose the name of the operating system through the resource, you -can pass the following JVM argument: - -``` --Dotel.java.disabled.resource.providers=io.opentelemetry.instrumentation.resources.OsResourceProvider -``` - -### Attribute limits - -These properties can be used to control the maximum number and length of attributes. - -| System property | Environment variable | Description | -|-----------------------------------|-----------------------------------|----------------------------------------------------------------------------------------------------------| -| otel.attribute.value.length.limit | OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT | The maximum length of attribute values. Applies to spans and logs. By default there is no limit. | -| otel.attribute.count.limit | OTEL_ATTRIBUTE_COUNT_LIMIT | The maximum number of attributes. Applies to spans, span events, span links, and logs. Default is `128`. | - -### Propagator - -The propagators determine which distributed tracing header formats are used, and which baggage propagation header formats are used. - -| System property | Environment variable | Description | -|------------------|----------------------|---------------------------------------------------------------------------------------------------------------------------| -| otel.propagators | OTEL_PROPAGATORS | The propagators to be used. Use a comma-separated list for multiple propagators. Default is `tracecontext,baggage` (W3C). 
| - -Supported values are - -- `"tracecontext"`: [W3C Trace Context](https://www.w3.org/TR/trace-context/) (add `baggage` as well to include W3C baggage) -- `"baggage"`: [W3C Baggage](https://www.w3.org/TR/baggage/) -- `"b3"`: [B3 Single](https://github.com/openzipkin/b3-propagation#single-header) -- `"b3multi"`: [B3 Multi](https://github.com/openzipkin/b3-propagation#multiple-headers) -- `"jaeger"`: [Jaeger](https://www.jaegertracing.io/docs/1.21/client-libraries/#propagation-format) (includes Jaeger baggage) -- `"xray"`: [AWS X-Ray](https://docs.aws.amazon.com/xray/latest/devguide/xray-concepts.html#xray-concepts-tracingheader) -- `"ottrace"`: [OT Trace](https://github.com/opentracing?q=basic&type=&language=) - -## Tracer provider - -The following configuration options are specific to `SdkTracerProvider`. See [general configuration](#general-configuration) for general configuration. - -### Span exporters - -The following exporters are only available for the trace signal. See [exporters](#exporters) for general exporter configuration. - -#### Jaeger exporter - -The [Jaeger](https://www.jaegertracing.io/docs/1.21/apis/#protobuf-via-grpc-stable) exporter. This exporter uses gRPC for its communications protocol. - -| System property | Environment variable | Description | -|-----------------------------------|-----------------------------------|----------------------------------------------------------------------------------------------------| -| otel.traces.exporter=jaeger | OTEL_TRACES_EXPORTER=jaeger | Select the Jaeger exporter | -| otel.exporter.jaeger.endpoint | OTEL_EXPORTER_JAEGER_ENDPOINT | The Jaeger gRPC endpoint to connect to. Default is `http://localhost:14250`. | -| otel.exporter.jaeger.timeout | OTEL_EXPORTER_JAEGER_TIMEOUT | The maximum waiting time, in milliseconds, allowed to send each batch. Default is `10000`. | - -#### Zipkin exporter - -The [Zipkin](https://zipkin.io/zipkin-api/) exporter. It sends JSON in [Zipkin format](https://zipkin.io/zipkin-api/#/default/post_spans) to a specified HTTP URL. - -| System property | Environment variable | Description | -|-------------------------------|-------------------------------|-----------------------------------------------------------------------------------------------------------------------| -| otel.traces.exporter=zipkin | OTEL_TRACES_EXPORTER=zipkin | Select the Zipkin exporter | -| otel.exporter.zipkin.endpoint | OTEL_EXPORTER_ZIPKIN_ENDPOINT | The Zipkin endpoint to connect to. Default is `http://localhost:9411/api/v2/spans`. Currently only HTTP is supported. | - -### Batch span processor - -| System property | Environment variable | Description | -|--------------------------------|--------------------------------|------------------------------------------------------------------------------------| -| otel.bsp.schedule.delay | OTEL_BSP_SCHEDULE_DELAY | The interval, in milliseconds, between two consecutive exports. Default is `5000`. | -| otel.bsp.max.queue.size | OTEL_BSP_MAX_QUEUE_SIZE | The maximum queue size. Default is `2048`. | -| otel.bsp.max.export.batch.size | OTEL_BSP_MAX_EXPORT_BATCH_SIZE | The maximum batch size. Default is `512`. | -| otel.bsp.export.timeout | OTEL_BSP_EXPORT_TIMEOUT | The maximum allowed time, in milliseconds, to export data. Default is `30000`. | - -### Sampler - -The sampler configures whether spans will be recorded for any call to `SpanBuilder.startSpan`. 
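
Each sampler named in the table below corresponds to a `Sampler` that can also be set programmatically on `SdkTracerProvider`. A minimal sketch of the programmatic equivalent of `parentbased_traceidratio`, with an illustrative ratio value:

```java
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.samplers.Sampler;

public final class SamplerExample {

  // Roughly equivalent to otel.traces.sampler=parentbased_traceidratio
  // with otel.traces.sampler.arg=0.25 (illustrative ratio).
  static SdkTracerProvider tracerProvider() {
    return SdkTracerProvider.builder()
        .setSampler(Sampler.parentBased(Sampler.traceIdRatioBased(0.25)))
        .build();
  }

  private SamplerExample() {}
}
```
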
- -| System property | Environment variable | Description | -|-------------------------|-------------------------|-------------------------------------------------------------------------| -| otel.traces.sampler | OTEL_TRACES_SAMPLER | The sampler to use for tracing. Defaults to `parentbased_always_on` | -| otel.traces.sampler.arg | OTEL_TRACES_SAMPLER_ARG | An argument to the configured tracer if supported, for example a ratio. | - -Supported values for `otel.traces.sampler` are - -- "always_on": AlwaysOnSampler -- "always_off": AlwaysOffSampler -- "traceidratio": TraceIdRatioBased. `otel.traces.sampler.arg` sets the ratio. -- "parentbased_always_on": ParentBased(root=AlwaysOnSampler) -- "parentbased_always_off": ParentBased(root=AlwaysOffSampler) -- "parentbased_traceidratio": ParentBased(root=TraceIdRatioBased). `otel.traces.sampler.arg` sets the ratio. - -### Span limits - -See [attribute limits](#attribute-limits) for general attribute limit configuration. - -These properties can be used to control the maximum size of spans by placing limits on attributes, events, and links. - -| System property | Environment variable | Description | -|----------------------------------------|----------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------| -| otel.span.attribute.value.length.limit | OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT | The maximum length of span attribute values. Takes precedence over `otel.attribute.value.length.limit`. By default there is no limit. | -| otel.span.attribute.count.limit | OTEL_SPAN_ATTRIBUTE_COUNT_LIMIT | The maximum number of attributes per span. Takes precedence over `otel.attribute.count.limit`. Default is `128`. | -| otel.span.event.count.limit | OTEL_SPAN_EVENT_COUNT_LIMIT | The maximum number of events per span. Default is `128`. | -| otel.span.link.count.limit | OTEL_SPAN_LINK_COUNT_LIMIT | The maximum number of links per span. Default is `128` | - -## Meter provider - -The following configuration options are specific to `SdkMeterProvider`. See [general configuration](#general-configuration) for general configuration. - -### Exemplars - -| System property | Environment variable | Description | -|------------------------------|------------------------------|-----------------------------------------------------------------------------------------------------------------| -| otel.metrics.exemplar.filter | OTEL_METRICS_EXEMPLAR_FILTER | The filter for exemplar sampling. Can be `ALWAYS_OFF`, `ALWAYS_ON` or `TRACE_BASED`. Default is `TRACE_BASED`. | - -### Periodic Metric Reader - -| System property | Environment variable | Description | -|-----------------------------|-----------------------------|----------------------------------------------------------------------------------------------| -| otel.metric.export.interval | OTEL_METRIC_EXPORT_INTERVAL | The interval, in milliseconds, between the start of two export attempts. Default is `60000`. | - -### Metric exporters - -The following exporters are only available for the metric signal. See [exporters](#exporters) for general exporter configuration. - -#### Prometheus exporter - -The [Prometheus](https://github.com/prometheus/docs/blob/master/content/docs/instrumenting/exposition_formats.md) exporter. 
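For example, assuming `opentelemetry-exporter-prometheus` is on the classpath, the exporter can be enabled and bound to an illustrative port with JVM arguments like the following (the available properties are described in the table below):

```
-Dotel.metrics.exporter=prometheus -Dotel.exporter.prometheus.port=9090
```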
- -| System property | Environment variable | Description | -|----------------------------------|----------------------------------|------------------------------------------------------------------------------------| -| otel.metrics.exporter=prometheus | OTEL_METRICS_EXPORTER=prometheus | Select the Prometheus exporter | -| otel.exporter.prometheus.port | OTEL_EXPORTER_PROMETHEUS_PORT | The local port used to bind the prometheus metric server. Default is `9464`. | -| otel.exporter.prometheus.host | OTEL_EXPORTER_PROMETHEUS_HOST | The local address used to bind the prometheus metric server. Default is `0.0.0.0`. | - -Note that this is a pull exporter - it opens up a server on the local process listening on the specified host and port, which -a Prometheus server scrapes from. - -### Cardinality Limits - -| System property | Environment variable | Description | -|---------------------------------------------|---------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------| -| otel.experimental.metrics.cardinality.limit | OTEL_EXPERIMENTAL_METRICS_CARDINALITY_LIMIT | If set, configure experimental cardinality limit. The value dictates the maximum number of distinct points per metric. Default is `2000`. | - -## Logger provider - -The following configuration options are specific to `SdkLoggerProvider`. See [general configuration](#general-configuration) for general configuration. - -## Batch log record processor - -| System property | Environment variable | Description | -|---------------------------------|---------------------------------|------------------------------------------------------------------------------------| -| otel.blrp.schedule.delay | OTEL_BLRP_SCHEDULE_DELAY | The interval, in milliseconds, between two consecutive exports. Default is `1000`. | -| otel.blrp.max.queue.size | OTEL_BLRP_MAX_QUEUE_SIZE | The maximum queue size. Default is `2048`. | -| otel.blrp.max.export.batch.size | OTEL_BLRP_MAX_EXPORT_BATCH_SIZE | The maximum batch size. Default is `512`. | -| otel.blrp.export.timeout | OTEL_BLRP_EXPORT_TIMEOUT | The maximum allowed time, in milliseconds, to export data. Default is `30000`. | - -## Customizing the OpenTelemetry SDK - -Autoconfiguration exposes SPI [hooks](../autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi) for customizing behavior programmatically as needed. -It's recommended to use the above configuration properties where possible, only implementing the SPI to add functionality not found in the -SDK by default. 
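As a minimal sketch of such an SPI hook (the class name and package are hypothetical, and this assumes the `opentelemetry-sdk-extension-autoconfigure-spi` artifact is on the classpath), an `AutoConfigurationCustomizerProvider` can supply property defaults and wrap autoconfigured components:

```java
package com.example.telemetry; // hypothetical package

import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer;
import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider;
import java.util.Collections;

public final class ExampleCustomizerProvider implements AutoConfigurationCustomizerProvider {
  @Override
  public void customize(AutoConfigurationCustomizer customizer) {
    // Defaults that can still be overridden by system properties or environment variables.
    customizer.addPropertiesSupplier(
        () -> Collections.singletonMap("otel.traces.sampler", "parentbased_traceidratio"));
    // Wrap every autoconfigured SpanExporter, e.g. to filter or decorate it; returning the
    // exporter unchanged is a no-op.
    customizer.addSpanExporterCustomizer((exporter, config) -> exporter);
  }
}
```

The provider is discovered via `java.util.ServiceLoader`, so it must also be listed in `META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider`.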
+The full documentation on the available configuration options has been moved to +[opentelemetry.io](https://opentelemetry.io/docs/languages/java/configuration/) diff --git a/sdk-extensions/autoconfigure/build.gradle.kts b/sdk-extensions/autoconfigure/build.gradle.kts index 23099336337..a00685f6e6d 100644 --- a/sdk-extensions/autoconfigure/build.gradle.kts +++ b/sdk-extensions/autoconfigure/build.gradle.kts @@ -10,7 +10,7 @@ dependencies { api(project(":sdk:all")) api(project(":sdk-extensions:autoconfigure-spi")) - implementation(project(":api:events")) + compileOnly(project(":api:incubator")) annotationProcessor("com.google.auto.value:auto-value") @@ -47,17 +47,17 @@ testing { } register("testFullConfig") { dependencies { - implementation(project(":api:events")) implementation(project(":extensions:trace-propagators")) - implementation(project(":exporters:jaeger")) implementation(project(":exporters:logging")) implementation(project(":exporters:logging-otlp")) implementation(project(":exporters:otlp:all")) implementation(project(":exporters:prometheus")) + implementation("io.prometheus:prometheus-metrics-exporter-httpserver") implementation(project(":exporters:zipkin")) implementation(project(":sdk:testing")) implementation(project(":sdk:trace-shaded-deps")) implementation(project(":sdk-extensions:jaeger-remote-sampler")) + implementation(project(":sdk-extensions:incubator")) implementation("com.google.guava:guava") implementation("io.opentelemetry.proto:opentelemetry-proto") @@ -68,17 +68,24 @@ testing { targets { all { testTask { - environment("OTEL_RESOURCE_ATTRIBUTES", "service.name=test,cat=meow") + environment("OTEL_SERVICE_NAME", "test") + environment("OTEL_RESOURCE_ATTRIBUTES", "cat=meow") environment("OTEL_PROPAGATORS", "tracecontext,baggage,b3,b3multi,jaeger,ottrace,test") environment("OTEL_EXPORTER_OTLP_HEADERS", "cat=meow,dog=bark") environment("OTEL_EXPORTER_OTLP_TIMEOUT", "5000") - environment("OTEL_SPAN_ATTRIBUTE_COUNT_LIMIT", "2") environment("OTEL_TEST_CONFIGURED", "true") environment("OTEL_TEST_WRAPPED", "1") } } } } + register("testIncubating") { + dependencies { + implementation(project(":sdk-extensions:incubator")) + implementation(project(":exporters:logging")) + implementation(project(":sdk:testing")) + } + } } } diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdk.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdk.java index 7d1bb1ba2cb..c5d9c77f4ee 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdk.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdk.java @@ -7,9 +7,13 @@ import com.google.auto.value.AutoValue; import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.incubator.config.ConfigProvider; import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.autoconfigure.internal.AutoConfigureUtil; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; import io.opentelemetry.sdk.resources.Resource; +import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; /** @@ -42,21 +46,48 @@ public static AutoConfiguredOpenTelemetrySdkBuilder builder() { } static AutoConfiguredOpenTelemetrySdk create( - OpenTelemetrySdk sdk, Resource resource, ConfigProperties config) { - return new 
AutoValue_AutoConfiguredOpenTelemetrySdk(sdk, resource, config); + OpenTelemetrySdk sdk, + Resource resource, + @Nullable ConfigProperties config, + @Nullable Object configProvider) { + return new AutoValue_AutoConfiguredOpenTelemetrySdk(sdk, resource, config, configProvider); } /** - * Returns the {@link OpenTelemetrySdk} that was auto-configured, or {@code null} if the SDK has - * been disabled. + * Returns the {@link OpenTelemetrySdk} that was auto-configured, or an effectively noop instance + * if the SDK has been disabled. + * + *

The instance returned if the SDK is disabled is equivalent to {@code + * OpenTelemetrySdk.builder().build()}, which is notably not the same as {@link + * OpenTelemetry#noop()}. */ public abstract OpenTelemetrySdk getOpenTelemetrySdk(); /** Returns the {@link Resource} that was auto-configured. */ abstract Resource getResource(); - /** Returns the {@link ConfigProperties} used for auto-configuration. */ + /** + * Returns the {@link ConfigProperties} used for auto-configuration, or {@code null} if + * declarative configuration was used. + * + *

This method is experimental so not public. You may reflectively call it using {@link + * AutoConfigureUtil#getConfig(AutoConfiguredOpenTelemetrySdk)}. + * + * @see #getConfigProvider() + */ + @Nullable abstract ConfigProperties getConfig(); + /** + * Returns the {@link ConfigProvider}, or {@code null} if declarative configuration was not used. + * + *

This method is experimental so not public. You may reflectively call it using {@link + * AutoConfigureUtil#getConfigProvider(AutoConfiguredOpenTelemetrySdk)}. + * + * @see #getConfig() + */ + @Nullable + abstract Object getConfigProvider(); + AutoConfiguredOpenTelemetrySdk() {} } diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkBuilder.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkBuilder.java index 14a25e93a62..e12848f0d81 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkBuilder.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkBuilder.java @@ -8,27 +8,30 @@ import static java.util.Objects.requireNonNull; import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.events.GlobalEventEmitterProvider; import io.opentelemetry.context.propagation.ContextPropagators; import io.opentelemetry.context.propagation.TextMapPropagator; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.OpenTelemetrySdkBuilder; +import io.opentelemetry.sdk.autoconfigure.internal.ComponentLoader; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer; import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener; import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.logs.LogRecordProcessor; import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder; import io.opentelemetry.sdk.logs.export.LogRecordExporter; -import io.opentelemetry.sdk.logs.internal.SdkEventEmitterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.opentelemetry.sdk.metrics.export.MetricReader; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder; +import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; import io.opentelemetry.sdk.trace.samplers.Sampler; import java.io.Closeable; @@ -56,6 +59,21 @@ public final class AutoConfiguredOpenTelemetrySdkBuilder implements AutoConfigur private static final Logger logger = Logger.getLogger(AutoConfiguredOpenTelemetrySdkBuilder.class.getName()); + private static final boolean INCUBATOR_AVAILABLE; + + static { + boolean incubatorAvailable = false; + try { + Class.forName( + "io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfiguration", + false, + AutoConfiguredOpenTelemetrySdkBuilder.class.getClassLoader()); + incubatorAvailable = true; + } catch (ClassNotFoundException e) { + // Not available + } + INCUBATOR_AVAILABLE = incubatorAvailable; + } @Nullable private ConfigProperties config; @@ -65,6 +83,9 @@ public final class AutoConfiguredOpenTelemetrySdkBuilder implements AutoConfigur propagatorCustomizer = (a, unused) -> a; private BiFunction spanExporterCustomizer = (a, 
unused) -> a; + + private BiFunction + spanProcessorCustomizer = (a, unused) -> a; private BiFunction samplerCustomizer = (a, unused) -> a; @@ -72,11 +93,15 @@ public final class AutoConfiguredOpenTelemetrySdkBuilder implements AutoConfigur meterProviderCustomizer = (a, unused) -> a; private BiFunction metricExporterCustomizer = (a, unused) -> a; + private BiFunction + metricReaderCustomizer = (a, unused) -> a; private BiFunction loggerProviderCustomizer = (a, unused) -> a; private BiFunction logRecordExporterCustomizer = (a, unused) -> a; + private BiFunction + logRecordProcessorCustomizer = (a, unused) -> a; private BiFunction resourceCustomizer = (a, unused) -> a; @@ -86,8 +111,11 @@ public final class AutoConfiguredOpenTelemetrySdkBuilder implements AutoConfigur private final List>> propertiesCustomizers = new ArrayList<>(); - private SpiHelper spiHelper = - SpiHelper.create(AutoConfiguredOpenTelemetrySdk.class.getClassLoader()); + private Function configPropertiesCustomizer = + Function.identity(); + + private ComponentLoader componentLoader = + SpiHelper.serviceComponentLoader(AutoConfiguredOpenTelemetrySdk.class.getClassLoader()); private boolean registerShutdownHook = true; @@ -185,6 +213,26 @@ public AutoConfiguredOpenTelemetrySdkBuilder addSpanExporterCustomizer( return this; } + /** + * Adds a {@link BiFunction} to invoke for all autoconfigured {@link + * io.opentelemetry.sdk.trace.SpanProcessor}. The return value of the {@link BiFunction} will + * replace the passed-in argument. In contrast to {@link #addSpanExporterCustomizer(BiFunction)} + * this allows modifications to happen before batching occurs. As a result, it is possible to + * efficiently filter spans, add artificial spans or delay spans for enhancing them with external, + * delayed data. + * + *

Multiple calls will execute the customizers in order. + */ + @Override + public AutoConfiguredOpenTelemetrySdkBuilder addSpanProcessorCustomizer( + BiFunction + spanProcessorCustomizer) { + requireNonNull(spanProcessorCustomizer, "spanProcessorCustomizer"); + this.spanProcessorCustomizer = + mergeCustomizer(this.spanProcessorCustomizer, spanProcessorCustomizer); + return this; + } + /** * Adds a {@link Supplier} of a map of property names and values to use as defaults for the {@link * ConfigProperties} used during auto-configuration. The order of precedence of properties is @@ -217,6 +265,21 @@ public AutoConfiguredOpenTelemetrySdkBuilder addPropertiesCustomizer( return this; } + /** + * Adds a {@link Function} to invoke the with the {@link ConfigProperties} to allow customization. + * + *

The argument to the function is the {@link ConfigProperties}, with the {@link + * #addPropertiesCustomizer(Function)} already applied. + * + *

The return value of the {@link Function} replace the {@link ConfigProperties} to be used. + */ + AutoConfiguredOpenTelemetrySdkBuilder setConfigPropertiesCustomizer( + Function configPropertiesCustomizer) { + requireNonNull(configPropertiesCustomizer, "configPropertiesCustomizer"); + this.configPropertiesCustomizer = configPropertiesCustomizer; + return this; + } + /** * Adds a {@link BiFunction} to invoke the with the {@link SdkMeterProviderBuilder} to allow * customization. The return value of the {@link BiFunction} will replace the passed-in argument. @@ -250,6 +313,20 @@ public AutoConfiguredOpenTelemetrySdkBuilder addMetricExporterCustomizer( return this; } + /** + * Adds a {@link BiFunction} to invoke with the autoconfigured {@link MetricReader} to allow + * customization. The return value of the {@link BiFunction} will replace the passed-in argument. + * + *

Multiple calls will execute the customizers in order. + */ + @Override + public AutoConfiguredOpenTelemetrySdkBuilder addMetricReaderCustomizer( + BiFunction readerCustomizer) { + requireNonNull(readerCustomizer, "readerCustomizer"); + this.metricReaderCustomizer = mergeCustomizer(this.metricReaderCustomizer, readerCustomizer); + return this; + } + /** * Adds a {@link BiFunction} to invoke the with the {@link SdkLoggerProviderBuilder} to allow * customization. The return value of the {@link BiFunction} will replace the passed-in argument. @@ -283,6 +360,26 @@ public AutoConfiguredOpenTelemetrySdkBuilder addLogRecordExporterCustomizer( return this; } + /** + * Adds a {@link BiFunction} to invoke for all autoconfigured {@link + * io.opentelemetry.sdk.logs.LogRecordProcessor}s. The return value of the {@link BiFunction} will + * replace the passed-in argument. In contrast to {@link + * #addLogRecordExporterCustomizer(BiFunction)} (BiFunction)} this allows modifications to happen + * before batching occurs. As a result, it is possible to efficiently filter logs, add artificial + * logs or delay logs for enhancing them with external, delayed data. + * + *

Multiple calls will execute the customizers in order. + */ + @Override + public AutoConfiguredOpenTelemetrySdkBuilder addLogRecordProcessorCustomizer( + BiFunction + logRecordProcessorCustomizer) { + requireNonNull(logRecordProcessorCustomizer, "logRecordProcessorCustomizer"); + this.logRecordProcessorCustomizer = + mergeCustomizer(this.logRecordProcessorCustomizer, logRecordProcessorCustomizer); + return this; + } + /** * Disable the registration of a shutdown hook to shut down the SDK when appropriate. By default, * the shutdown hook is registered. @@ -311,7 +408,14 @@ public AutoConfiguredOpenTelemetrySdkBuilder setResultAsGlobal() { public AutoConfiguredOpenTelemetrySdkBuilder setServiceClassLoader( ClassLoader serviceClassLoader) { requireNonNull(serviceClassLoader, "serviceClassLoader"); - this.spiHelper = SpiHelper.create(serviceClassLoader); + this.componentLoader = SpiHelper.serviceComponentLoader(serviceClassLoader); + return this; + } + + /** Sets the {@link ComponentLoader} to be used to load SPI implementations. */ + AutoConfiguredOpenTelemetrySdkBuilder setComponentLoader(ComponentLoader componentLoader) { + requireNonNull(componentLoader, "componentLoader"); + this.componentLoader = componentLoader; return this; } @@ -320,6 +424,7 @@ public AutoConfiguredOpenTelemetrySdkBuilder setServiceClassLoader( * the settings of this {@link AutoConfiguredOpenTelemetrySdkBuilder}. */ public AutoConfiguredOpenTelemetrySdk build() { + SpiHelper spiHelper = SpiHelper.create(componentLoader); if (!customized) { customized = true; mergeSdkTracerProviderConfigurer(); @@ -331,6 +436,15 @@ public AutoConfiguredOpenTelemetrySdk build() { ConfigProperties config = getConfig(); + AutoConfiguredOpenTelemetrySdk fromFileConfiguration = + maybeConfigureFromFile(config, componentLoader); + if (fromFileConfiguration != null) { + maybeRegisterShutdownHook(fromFileConfiguration.getOpenTelemetrySdk()); + maybeSetAsGlobal( + fromFileConfiguration.getOpenTelemetrySdk(), fromFileConfiguration.getConfigProvider()); + return fromFileConfiguration; + } + Resource resource = ResourceConfiguration.configureResource(config, spiHelper, resourceCustomizer); @@ -339,72 +453,24 @@ public AutoConfiguredOpenTelemetrySdk build() { List closeables = new ArrayList<>(); try { - OpenTelemetrySdk openTelemetrySdk = OpenTelemetrySdk.builder().build(); - boolean sdkEnabled = !config.getBoolean("otel.sdk.disabled", false); + OpenTelemetrySdkBuilder sdkBuilder = OpenTelemetrySdk.builder(); + // The propagation system is part of the API and functions in the absence of an SDK. 
+ ContextPropagators propagators = + PropagatorConfiguration.configurePropagators(config, spiHelper, propagatorCustomizer); + sdkBuilder.setPropagators(propagators); + + boolean sdkEnabled = !config.getBoolean("otel.sdk.disabled", false); if (sdkEnabled) { - SdkMeterProviderBuilder meterProviderBuilder = SdkMeterProvider.builder(); - meterProviderBuilder.setResource(resource); - MeterProviderConfiguration.configureMeterProvider( - meterProviderBuilder, config, spiHelper, metricExporterCustomizer, closeables); - meterProviderBuilder = meterProviderCustomizer.apply(meterProviderBuilder, config); - SdkMeterProvider meterProvider = meterProviderBuilder.build(); - closeables.add(meterProvider); - - SdkTracerProviderBuilder tracerProviderBuilder = SdkTracerProvider.builder(); - tracerProviderBuilder.setResource(resource); - TracerProviderConfiguration.configureTracerProvider( - tracerProviderBuilder, - config, - spiHelper, - meterProvider, - spanExporterCustomizer, - samplerCustomizer, - closeables); - tracerProviderBuilder = tracerProviderCustomizer.apply(tracerProviderBuilder, config); - SdkTracerProvider tracerProvider = tracerProviderBuilder.build(); - closeables.add(tracerProvider); - - SdkLoggerProviderBuilder loggerProviderBuilder = SdkLoggerProvider.builder(); - loggerProviderBuilder.setResource(resource); - LoggerProviderConfiguration.configureLoggerProvider( - loggerProviderBuilder, - config, - spiHelper, - meterProvider, - logRecordExporterCustomizer, - closeables); - loggerProviderBuilder = loggerProviderCustomizer.apply(loggerProviderBuilder, config); - SdkLoggerProvider loggerProvider = loggerProviderBuilder.build(); - closeables.add(loggerProvider); - - ContextPropagators propagators = - PropagatorConfiguration.configurePropagators(config, spiHelper, propagatorCustomizer); - - OpenTelemetrySdkBuilder sdkBuilder = - OpenTelemetrySdk.builder() - .setTracerProvider(tracerProvider) - .setLoggerProvider(loggerProvider) - .setMeterProvider(meterProvider) - .setPropagators(propagators); - - openTelemetrySdk = sdkBuilder.build(); + configureSdk(sdkBuilder, config, resource, spiHelper, closeables); } - // NOTE: Shutdown hook registration is untested. Modify with caution. - if (registerShutdownHook) { - Runtime.getRuntime().addShutdownHook(shutdownHook(openTelemetrySdk)); - } + OpenTelemetrySdk openTelemetrySdk = sdkBuilder.build(); + maybeRegisterShutdownHook(openTelemetrySdk); + maybeSetAsGlobal(openTelemetrySdk, null); + callAutoConfigureListeners(spiHelper, openTelemetrySdk); - if (setResultAsGlobal) { - GlobalOpenTelemetry.set(openTelemetrySdk); - GlobalEventEmitterProvider.set( - SdkEventEmitterProvider.create(openTelemetrySdk.getSdkLoggerProvider())); - logger.log( - Level.FINE, "Global OpenTelemetry set to {0} by autoconfiguration", openTelemetrySdk); - } - - return AutoConfiguredOpenTelemetrySdk.create(openTelemetrySdk, resource, config); + return AutoConfiguredOpenTelemetrySdk.create(openTelemetrySdk, resource, config, null); } catch (RuntimeException e) { logger.info( "Error encountered during autoconfiguration. 
Closing partially configured components."); @@ -424,10 +490,117 @@ public AutoConfiguredOpenTelemetrySdk build() { } } + // Visible for testing + void configureSdk( + OpenTelemetrySdkBuilder sdkBuilder, + ConfigProperties config, + Resource resource, + SpiHelper spiHelper, + List closeables) { + SdkMeterProviderBuilder meterProviderBuilder = SdkMeterProvider.builder(); + meterProviderBuilder.setResource(resource); + + MeterProviderConfiguration.configureMeterProvider( + meterProviderBuilder, + config, + spiHelper, + metricReaderCustomizer, + metricExporterCustomizer, + closeables); + meterProviderBuilder = meterProviderCustomizer.apply(meterProviderBuilder, config); + SdkMeterProvider meterProvider = meterProviderBuilder.build(); + closeables.add(meterProvider); + + SdkTracerProviderBuilder tracerProviderBuilder = SdkTracerProvider.builder(); + tracerProviderBuilder.setResource(resource); + TracerProviderConfiguration.configureTracerProvider( + tracerProviderBuilder, + config, + spiHelper, + meterProvider, + spanExporterCustomizer, + spanProcessorCustomizer, + samplerCustomizer, + closeables); + tracerProviderBuilder = tracerProviderCustomizer.apply(tracerProviderBuilder, config); + SdkTracerProvider tracerProvider = tracerProviderBuilder.build(); + closeables.add(tracerProvider); + + SdkLoggerProviderBuilder loggerProviderBuilder = SdkLoggerProvider.builder(); + loggerProviderBuilder.setResource(resource); + LoggerProviderConfiguration.configureLoggerProvider( + loggerProviderBuilder, + config, + spiHelper, + meterProvider, + logRecordExporterCustomizer, + logRecordProcessorCustomizer, + closeables); + loggerProviderBuilder = loggerProviderCustomizer.apply(loggerProviderBuilder, config); + SdkLoggerProvider loggerProvider = loggerProviderBuilder.build(); + closeables.add(loggerProvider); + + sdkBuilder + .setTracerProvider(tracerProvider) + .setLoggerProvider(loggerProvider) + .setMeterProvider(meterProvider); + } + + @Nullable + private static AutoConfiguredOpenTelemetrySdk maybeConfigureFromFile( + ConfigProperties config, ComponentLoader componentLoader) { + String otelConfigFile = config.getString("otel.config.file"); + if (otelConfigFile != null && !otelConfigFile.isEmpty()) { + logger.warning( + "otel.config.file was set, but has been replaced with otel.experimental.config.file"); + } + String configurationFile = config.getString("otel.experimental.config.file"); + if (configurationFile == null || configurationFile.isEmpty()) { + return null; + } + if (!INCUBATOR_AVAILABLE) { + throw new ConfigurationException( + "Cannot autoconfigure from config file without opentelemetry-sdk-extension-incubator on the classpath"); + } + return IncubatingUtil.configureFromFile(logger, configurationFile, componentLoader); + } + + private void maybeRegisterShutdownHook(OpenTelemetrySdk openTelemetrySdk) { + if (!registerShutdownHook) { + return; + } + Runtime.getRuntime().addShutdownHook(shutdownHook(openTelemetrySdk)); + } + + private void maybeSetAsGlobal( + OpenTelemetrySdk openTelemetrySdk, @Nullable Object configProvider) { + if (!setResultAsGlobal) { + return; + } + GlobalOpenTelemetry.set(openTelemetrySdk); + if (INCUBATOR_AVAILABLE && configProvider != null) { + IncubatingUtil.setGlobalConfigProvider(configProvider); + } + logger.log( + Level.FINE, "Global OpenTelemetry set to {0} by autoconfiguration", openTelemetrySdk); + } + + // Visible for testing + void callAutoConfigureListeners(SpiHelper spiHelper, OpenTelemetrySdk openTelemetrySdk) { + for (AutoConfigureListener listener : 
spiHelper.getListeners()) { + try { + listener.afterAutoConfigure(openTelemetrySdk); + } catch (Throwable throwable) { + logger.log( + Level.WARNING, "Error invoking listener " + listener.getClass().getName(), throwable); + } + } + } + @SuppressWarnings("deprecation") // Support deprecated SdkTracerProviderConfigurer - private void mergeSdkTracerProviderConfigurer() { + void mergeSdkTracerProviderConfigurer() { for (io.opentelemetry.sdk.autoconfigure.spi.traces.SdkTracerProviderConfigurer configurer : - spiHelper.load( + componentLoader.load( io.opentelemetry.sdk.autoconfigure.spi.traces.SdkTracerProviderConfigurer.class)) { addTracerProviderCustomizer( (builder, config) -> { @@ -451,7 +624,7 @@ private ConfigProperties computeConfigProperties() { Map overrides = customizer.apply(properties); properties = properties.withOverrides(overrides); } - return properties; + return configPropertiesCustomizer.apply(properties); } // Visible for testing diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/EnvironmentResourceProvider.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/EnvironmentResourceProvider.java similarity index 82% rename from sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/EnvironmentResourceProvider.java rename to sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/EnvironmentResourceProvider.java index 5e1dfcf356c..231967c833c 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/EnvironmentResourceProvider.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/EnvironmentResourceProvider.java @@ -3,9 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.sdk.autoconfigure.internal; +package io.opentelemetry.sdk.autoconfigure; -import io.opentelemetry.sdk.autoconfigure.ResourceConfiguration; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; import io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider; import io.opentelemetry.sdk.resources.Resource; @@ -13,6 +12,8 @@ /** * {@link ResourceProvider} for automatically configuring {@link * ResourceConfiguration#createEnvironmentResource(ConfigProperties)}. + * + * @since 1.47.0 */ public final class EnvironmentResourceProvider implements ResourceProvider { @Override @@ -22,7 +23,8 @@ public Resource createResource(ConfigProperties config) { @Override public int order() { - // Environment resource takes precedent over all other ResourceProviders - return Integer.MAX_VALUE; + // Environment resource takes precedent over all other ResourceProviders except + // ServiceInstanceIdResourceProvider. 
+ return Integer.MAX_VALUE - 1; } } diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/IncubatingUtil.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/IncubatingUtil.java new file mode 100644 index 00000000000..a1280e241d4 --- /dev/null +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/IncubatingUtil.java @@ -0,0 +1,88 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.autoconfigure; + +import io.opentelemetry.api.incubator.config.ConfigProvider; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.incubator.config.GlobalConfigProvider; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.autoconfigure.internal.ComponentLoader; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.resources.Resource; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.Objects; +import java.util.logging.Logger; + +/** + * Utilities for interacting with incubating components ({@code + * io.opentelemetry:opentelemetry-api-incubator} and {@code + * io.opentelemetry:opentelemetry-sdk-extension-incubator}), which are not guaranteed to be present + * on the classpath. For all methods, callers MUST first separately reflectively confirm that the + * incubator is available on the classpath. + */ +final class IncubatingUtil { + + private IncubatingUtil() {} + + static AutoConfiguredOpenTelemetrySdk configureFromFile( + Logger logger, String configurationFile, ComponentLoader componentLoader) { + logger.fine("Autoconfiguring from configuration file: " + configurationFile); + try (FileInputStream fis = new FileInputStream(configurationFile)) { + Class declarativeConfiguration = + Class.forName( + "io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfiguration"); + Method parse = declarativeConfiguration.getMethod("parse", InputStream.class); + Object model = parse.invoke(null, fis); + Class openTelemetryConfiguration = + Class.forName( + "io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel"); + Method create = + declarativeConfiguration.getMethod( + "create", openTelemetryConfiguration, ComponentLoader.class); + OpenTelemetrySdk sdk = (OpenTelemetrySdk) create.invoke(null, model, componentLoader); + Class sdkConfigProvider = + Class.forName("io.opentelemetry.sdk.extension.incubator.fileconfig.SdkConfigProvider"); + Method createFileConfigProvider = + sdkConfigProvider.getMethod("create", openTelemetryConfiguration); + ConfigProvider configProvider = (ConfigProvider) createFileConfigProvider.invoke(null, model); + // Note: can't access file configuration resource without reflection so setting a dummy + // resource + return AutoConfiguredOpenTelemetrySdk.create( + sdk, Resource.getDefault(), null, configProvider); + } catch (FileNotFoundException e) { + throw new ConfigurationException("Configuration file not found", e); + } catch (ClassNotFoundException | NoSuchMethodException | IllegalAccessException e) { + throw new ConfigurationException( + "Error configuring from file. 
Is opentelemetry-sdk-extension-incubator on the classpath?", + e); + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (cause instanceof DeclarativeConfigException) { + throw toConfigurationException((DeclarativeConfigException) cause); + } + throw new ConfigurationException("Unexpected error configuring from file", e); + } catch (IOException e) { + // IOException (other than FileNotFoundException which is caught above) is only thrown + // above by FileInputStream.close() + throw new ConfigurationException("Error closing file", e); + } + } + + private static ConfigurationException toConfigurationException( + DeclarativeConfigException exception) { + String message = Objects.requireNonNull(exception.getMessage()); + return new ConfigurationException(message, exception); + } + + static void setGlobalConfigProvider(Object configProvider) { + GlobalConfigProvider.set((ConfigProvider) configProvider); + } +} diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfiguration.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfiguration.java index b0eeb2481c0..db0589ee183 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfiguration.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfiguration.java @@ -27,8 +27,11 @@ final class LogRecordExporterConfiguration { static { EXPORTER_ARTIFACT_ID_BY_NAME = new HashMap<>(); + EXPORTER_ARTIFACT_ID_BY_NAME.put("console", "opentelemetry-exporter-logging"); EXPORTER_ARTIFACT_ID_BY_NAME.put("logging", "opentelemetry-exporter-logging"); EXPORTER_ARTIFACT_ID_BY_NAME.put("logging-otlp", "opentelemetry-exporter-logging-otlp"); + EXPORTER_ARTIFACT_ID_BY_NAME.put( + "experimental-otlp/stdout", "opentelemetry-exporter-logging-otlp"); EXPORTER_ARTIFACT_ID_BY_NAME.put("otlp", "opentelemetry-exporter-otlp"); } @@ -45,13 +48,7 @@ static Map configureLogRecordExporters( throw new ConfigurationException( "otel.logs.exporter contains " + EXPORTER_NONE + " along with other exporters"); } - LogRecordExporter noop = LogRecordExporter.composite(); - LogRecordExporter customized = logRecordExporterCustomizer.apply(noop, config); - if (customized == noop) { - return Collections.emptyMap(); - } - closeables.add(customized); - return Collections.singletonMap(EXPORTER_NONE, customized); + return Collections.emptyMap(); } if (exporterNames.isEmpty()) { diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfiguration.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfiguration.java index 82ffec2c33e..f762915dc41 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfiguration.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfiguration.java @@ -21,6 +21,7 @@ import java.io.Closeable; import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -28,6 +29,9 @@ final class LoggerProviderConfiguration { + private static final List simpleProcessorExporterNames = + Arrays.asList("console", "logging"); + static void configureLoggerProvider( SdkLoggerProviderBuilder loggerProviderBuilder, ConfigProperties config, @@ -35,6 +39,8 @@ 
static void configureLoggerProvider( MeterProvider meterProvider, BiFunction logRecordExporterCustomizer, + BiFunction + logRecordProcessorCustomizer, List closeables) { loggerProviderBuilder.setLogLimits(() -> configureLogLimits(config)); @@ -42,8 +48,15 @@ static void configureLoggerProvider( Map exportersByName = configureLogRecordExporters(config, spiHelper, logRecordExporterCustomizer, closeables); - configureLogRecordProcessors(config, exportersByName, meterProvider, closeables) - .forEach(loggerProviderBuilder::addLogRecordProcessor); + List processors = + configureLogRecordProcessors(config, exportersByName, meterProvider, closeables); + for (LogRecordProcessor processor : processors) { + LogRecordProcessor wrapped = logRecordProcessorCustomizer.apply(processor, config); + if (wrapped != processor) { + closeables.add(wrapped); + } + loggerProviderBuilder.addLogRecordProcessor(wrapped); + } } // Visible for testing @@ -55,11 +68,13 @@ static List configureLogRecordProcessors( Map exportersByNameCopy = new HashMap<>(exportersByName); List logRecordProcessors = new ArrayList<>(); - LogRecordExporter exporter = exportersByNameCopy.remove("logging"); - if (exporter != null) { - LogRecordProcessor logRecordProcessor = SimpleLogRecordProcessor.create(exporter); - closeables.add(logRecordProcessor); - logRecordProcessors.add(logRecordProcessor); + for (String simpleProcessorExporterName : simpleProcessorExporterNames) { + LogRecordExporter exporter = exportersByNameCopy.remove(simpleProcessorExporterName); + if (exporter != null) { + LogRecordProcessor logRecordProcessor = SimpleLogRecordProcessor.create(exporter); + closeables.add(logRecordProcessor); + logRecordProcessors.add(logRecordProcessor); + } } if (!exportersByNameCopy.isEmpty()) { diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfiguration.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfiguration.java index 525dc54974a..8686c1208c1 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfiguration.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfiguration.java @@ -21,14 +21,19 @@ import java.util.Locale; import java.util.Set; import java.util.function.BiFunction; +import java.util.logging.Logger; import java.util.stream.Collectors; final class MeterProviderConfiguration { + private static final Logger logger = Logger.getLogger(MeterProviderConfiguration.class.getName()); + static void configureMeterProvider( SdkMeterProviderBuilder meterProviderBuilder, ConfigProperties config, SpiHelper spiHelper, + BiFunction + metricReaderCustomizer, BiFunction metricExporterCustomizer, List closeables) { @@ -49,23 +54,33 @@ static void configureMeterProvider( break; } - int cardinalityLimit = - config.getInt( - "otel.experimental.metrics.cardinality.limit", MetricStorage.DEFAULT_MAX_CARDINALITY); - if (cardinalityLimit < 1) { - throw new ConfigurationException("otel.experimental.metrics.cardinality.limit must be >= 1"); + Integer cardinalityLimit = config.getInt("otel.java.metrics.cardinality.limit"); + if (cardinalityLimit == null) { + cardinalityLimit = config.getInt("otel.experimental.metrics.cardinality.limit"); + if (cardinalityLimit != null) { + logger.warning( + "otel.experimental.metrics.cardinality.limit is deprecated and will be removed after 1.51.0 release. 
Please use otel.java.metrics.cardinality.limit instead."); + } + } + if (cardinalityLimit != null && cardinalityLimit < 1) { + throw new ConfigurationException("otel.java.metrics.cardinality.limit must be >= 1"); } + int resolvedCardinalityLimit = + cardinalityLimit == null ? MetricStorage.DEFAULT_MAX_CARDINALITY : cardinalityLimit; - configureMetricReaders(config, spiHelper, metricExporterCustomizer, closeables) + configureMetricReaders( + config, spiHelper, metricReaderCustomizer, metricExporterCustomizer, closeables) .forEach( reader -> - SdkMeterProviderUtil.registerMetricReaderWithCardinalitySelector( - meterProviderBuilder, reader, instrumentType -> cardinalityLimit)); + meterProviderBuilder.registerMetricReader( + reader, instrumentType -> resolvedCardinalityLimit)); } static List configureMetricReaders( ConfigProperties config, SpiHelper spiHelper, + BiFunction + metricReaderCustomizer, BiFunction metricExporterCustomizer, List closeables) { @@ -85,7 +100,12 @@ static List configureMetricReaders( .map( exporterName -> MetricExporterConfiguration.configureReader( - exporterName, config, spiHelper, metricExporterCustomizer, closeables)) + exporterName, + config, + spiHelper, + metricReaderCustomizer, + metricExporterCustomizer, + closeables)) .collect(Collectors.toList()); } diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/MetricExporterConfiguration.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/MetricExporterConfiguration.java index 2aa968a82bb..c2a63569812 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/MetricExporterConfiguration.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/MetricExporterConfiguration.java @@ -30,6 +30,7 @@ final class MetricExporterConfiguration { static { EXPORTER_ARTIFACT_ID_BY_NAME = new HashMap<>(); + EXPORTER_ARTIFACT_ID_BY_NAME.put("console", "opentelemetry-exporter-logging"); EXPORTER_ARTIFACT_ID_BY_NAME.put("logging", "opentelemetry-exporter-logging"); EXPORTER_ARTIFACT_ID_BY_NAME.put("logging-otlp", "opentelemetry-exporter-logging-otlp"); EXPORTER_ARTIFACT_ID_BY_NAME.put("otlp", "opentelemetry-exporter-otlp"); @@ -42,6 +43,8 @@ static MetricReader configureReader( String name, ConfigProperties config, SpiHelper spiHelper, + BiFunction + metricReaderCustomizer, BiFunction metricExporterCustomizer, List closeables) { @@ -56,7 +59,14 @@ static MetricReader configureReader( MetricReader metricReader = configureMetricReader(name, spiMetricReadersManager); if (metricReader != null) { closeables.add(metricReader); - return metricReader; + + // Customize metric reader + MetricReader customizedMetricReader = metricReaderCustomizer.apply(metricReader, config); + if (customizedMetricReader != metricReader) { + closeables.add(customizedMetricReader); + } + + return customizedMetricReader; } // No exporter or reader with the name throw new ConfigurationException("Unrecognized value for otel.metrics.exporter: " + name); @@ -74,7 +84,11 @@ static MetricReader configureReader( .setInterval(config.getDuration("otel.metric.export.interval", DEFAULT_EXPORT_INTERVAL)) .build(); closeables.add(reader); - return reader; + MetricReader customizedMetricReader = metricReaderCustomizer.apply(reader, config); + if (customizedMetricReader != reader) { + closeables.add(customizedMetricReader); + } + return customizedMetricReader; } // Visible for testing diff --git 
a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/ResourceConfiguration.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/ResourceConfiguration.java index b431ad6c8d7..8e00ee19ab2 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/ResourceConfiguration.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/ResourceConfiguration.java @@ -21,9 +21,12 @@ import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.BiFunction; +import java.util.logging.Level; +import java.util.logging.Logger; /** * Auto-configuration for the OpenTelemetry {@link Resource}. @@ -32,13 +35,21 @@ */ public final class ResourceConfiguration { + private static final Logger logger = Logger.getLogger(ResourceConfiguration.class.getName()); + private static final AttributeKey SERVICE_NAME = AttributeKey.stringKey("service.name"); - private static final String SCHEMA_URL = "https://opentelemetry.io/schemas/1.21.0"; // Visible for testing static final String ATTRIBUTE_PROPERTY = "otel.resource.attributes"; static final String SERVICE_NAME_PROPERTY = "otel.service.name"; - static final String DISABLED_ATTRIBUTE_KEYS = "otel.experimental.resource.disabled.keys"; + static final String DISABLED_ATTRIBUTE_KEYS = "otel.resource.disabled.keys"; + static final String ENABLED_RESOURCE_PROVIDERS = "otel.java.enabled.resource.providers"; + static final String DISABLED_RESOURCE_PROVIDERS = "otel.java.disabled.resource.providers"; + + private static final String OLD_ENVIRONMENT_DETECTOR_FQCN = + "io.opentelemetry.sdk.autoconfigure.internal.EnvironmentResourceProvider"; + private static final String NEW_ENVIRONMENT_DETECT_FQCN = + EnvironmentResourceProvider.class.getName(); /** * Create a {@link Resource} from the environment. The resource contains attributes parsed from @@ -79,7 +90,7 @@ public static Resource createEnvironmentResource(ConfigProperties config) { resourceAttributes.put(SERVICE_NAME, serviceName); } - return Resource.create(resourceAttributes.build(), SCHEMA_URL); + return Resource.create(resourceAttributes.build()); } static Resource configureResource( @@ -88,10 +99,34 @@ static Resource configureResource( BiFunction resourceCustomizer) { Resource result = Resource.getDefault(); - Set enabledProviders = - new HashSet<>(config.getList("otel.java.enabled.resource.providers")); - Set disabledProviders = - new HashSet<>(config.getList("otel.java.disabled.resource.providers")); + Set enabledProviders = new HashSet<>(config.getList(ENABLED_RESOURCE_PROVIDERS)); + if (enabledProviders.remove(OLD_ENVIRONMENT_DETECTOR_FQCN)) { + logger.log( + Level.WARNING, + "Found reference to " + + OLD_ENVIRONMENT_DETECTOR_FQCN + + " in " + + ENABLED_RESOURCE_PROVIDERS + + ". Please update to " + + NEW_ENVIRONMENT_DETECT_FQCN + + ". Support for the old provider name will be removed after 1.49.0."); + enabledProviders.add(NEW_ENVIRONMENT_DETECT_FQCN); + } + + Set disabledProviders = new HashSet<>(config.getList(DISABLED_RESOURCE_PROVIDERS)); + if (disabledProviders.remove(OLD_ENVIRONMENT_DETECTOR_FQCN)) { + logger.log( + Level.WARNING, + "Found reference to " + + OLD_ENVIRONMENT_DETECTOR_FQCN + + " in " + + DISABLED_RESOURCE_PROVIDERS + + ". Please update to " + + NEW_ENVIRONMENT_DETECT_FQCN + + ". 
Support for the old provider name will be removed after 1.49.0."); + disabledProviders.add(NEW_ENVIRONMENT_DETECT_FQCN); + } + for (ResourceProvider resourceProvider : spiHelper.loadOrdered(ResourceProvider.class)) { if (!enabledProviders.isEmpty() && !enabledProviders.contains(resourceProvider.getClass().getName())) { @@ -114,7 +149,8 @@ static Resource configureResource( // visible for testing static Resource filterAttributes(Resource resource, ConfigProperties configProperties) { - Set disabledKeys = new HashSet<>(configProperties.getList(DISABLED_ATTRIBUTE_KEYS)); + List disabledAttibuteKeys = configProperties.getList(DISABLED_ATTRIBUTE_KEYS); + Set disabledKeys = new HashSet<>(disabledAttibuteKeys); ResourceBuilder builder = resource.toBuilder().removeIf(attributeKey -> disabledKeys.contains(attributeKey.getKey())); diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/SpanExporterConfiguration.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/SpanExporterConfiguration.java index c5da0e0a045..74e6457125c 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/SpanExporterConfiguration.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/SpanExporterConfiguration.java @@ -27,6 +27,7 @@ final class SpanExporterConfiguration { static { EXPORTER_ARTIFACT_ID_BY_NAME = new HashMap<>(); + EXPORTER_ARTIFACT_ID_BY_NAME.put("console", "opentelemetry-exporter-logging"); EXPORTER_ARTIFACT_ID_BY_NAME.put("jaeger", "opentelemetry-exporter-jaeger"); EXPORTER_ARTIFACT_ID_BY_NAME.put("logging", "opentelemetry-exporter-logging"); EXPORTER_ARTIFACT_ID_BY_NAME.put("logging-otlp", "opentelemetry-exporter-logging-otlp"); @@ -47,13 +48,7 @@ static Map configureSpanExporters( throw new ConfigurationException( "otel.traces.exporter contains " + EXPORTER_NONE + " along with other exporters"); } - SpanExporter noop = SpanExporter.composite(); - SpanExporter customized = spanExporterCustomizer.apply(noop, config); - if (customized == noop) { - return Collections.emptyMap(); - } - closeables.add(customized); - return Collections.singletonMap(EXPORTER_NONE, customized); + return Collections.emptyMap(); } if (exporterNames.isEmpty()) { diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/TracerProviderConfiguration.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/TracerProviderConfiguration.java index 78de1700ed9..ee6d75d4c70 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/TracerProviderConfiguration.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/TracerProviderConfiguration.java @@ -23,6 +23,7 @@ import java.io.Closeable; import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -32,6 +33,8 @@ final class TracerProviderConfiguration { private static final double DEFAULT_TRACEIDRATIO_SAMPLE_RATIO = 1.0d; private static final String PARENTBASED_ALWAYS_ON = "parentbased_always_on"; + private static final List simpleProcessorExporterNames = + Arrays.asList("console", "logging"); static void configureTracerProvider( SdkTracerProviderBuilder tracerProviderBuilder, @@ -40,6 +43,8 @@ static void configureTracerProvider( MeterProvider meterProvider, BiFunction spanExporterCustomizer, + BiFunction + spanProcessorCustomizer, 
BiFunction samplerCustomizer, List closeables) { @@ -53,8 +58,15 @@ static void configureTracerProvider( SpanExporterConfiguration.configureSpanExporters( config, spiHelper, spanExporterCustomizer, closeables); - configureSpanProcessors(config, exportersByName, meterProvider, closeables) - .forEach(tracerProviderBuilder::addSpanProcessor); + List processors = + configureSpanProcessors(config, exportersByName, meterProvider, closeables); + for (SpanProcessor processor : processors) { + SpanProcessor wrapped = spanProcessorCustomizer.apply(processor, config); + if (wrapped != processor) { + closeables.add(wrapped); + } + tracerProviderBuilder.addSpanProcessor(wrapped); + } } static List configureSpanProcessors( @@ -65,11 +77,13 @@ static List configureSpanProcessors( Map exportersByNameCopy = new HashMap<>(exportersByName); List spanProcessors = new ArrayList<>(); - SpanExporter exporter = exportersByNameCopy.remove("logging"); - if (exporter != null) { - SpanProcessor spanProcessor = SimpleSpanProcessor.create(exporter); - closeables.add(spanProcessor); - spanProcessors.add(spanProcessor); + for (String simpleProcessorExporterNames : simpleProcessorExporterNames) { + SpanExporter exporter = exportersByNameCopy.remove(simpleProcessorExporterNames); + if (exporter != null) { + SpanProcessor spanProcessor = SimpleSpanProcessor.create(exporter); + closeables.add(spanProcessor); + spanProcessors.add(spanProcessor); + } } if (!exportersByNameCopy.isEmpty()) { @@ -164,21 +178,13 @@ static Sampler configureSampler(String sampler, ConfigProperties config, SpiHelp case "always_off": return Sampler.alwaysOff(); case "traceidratio": - { - double ratio = - config.getDouble("otel.traces.sampler.arg", DEFAULT_TRACEIDRATIO_SAMPLE_RATIO); - return Sampler.traceIdRatioBased(ratio); - } + return ratioSampler(config); case PARENTBASED_ALWAYS_ON: return Sampler.parentBased(Sampler.alwaysOn()); case "parentbased_always_off": return Sampler.parentBased(Sampler.alwaysOff()); case "parentbased_traceidratio": - { - double ratio = - config.getDouble("otel.traces.sampler.arg", DEFAULT_TRACEIDRATIO_SAMPLE_RATIO); - return Sampler.parentBased(Sampler.traceIdRatioBased(ratio)); - } + return Sampler.parentBased(ratioSampler(config)); case "parentbased_jaeger_remote": Sampler jaegerRemote = spiSamplersManager.getByName("jaeger_remote"); if (jaegerRemote == null) { @@ -196,5 +202,10 @@ static Sampler configureSampler(String sampler, ConfigProperties config, SpiHelp } } + private static Sampler ratioSampler(ConfigProperties config) { + double ratio = config.getDouble("otel.traces.sampler.arg", DEFAULT_TRACEIDRATIO_SAMPLE_RATIO); + return Sampler.traceIdRatioBased(ratio); + } + private TracerProviderConfiguration() {} } diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/AutoConfigureUtil.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/AutoConfigureUtil.java index e77620fc49a..52f0236e8d2 100644 --- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/AutoConfigureUtil.java +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/AutoConfigureUtil.java @@ -5,10 +5,14 @@ package io.opentelemetry.sdk.autoconfigure.internal; +import io.opentelemetry.api.incubator.config.ConfigProvider; import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk; +import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdkBuilder; import 
io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.util.function.Function; +import javax.annotation.Nullable; /** * This class is internal and is hence not for public use. Its APIs are unstable and can change at @@ -18,7 +22,12 @@ public final class AutoConfigureUtil { private AutoConfigureUtil() {} - /** Returns the {@link ConfigProperties} used for auto-configuration. */ + /** + * Returns the {@link ConfigProperties} used for auto-configuration. + * + * @return the config properties, or {@code null} if declarative configuration is used + */ + @Nullable public static ConfigProperties getConfig( AutoConfiguredOpenTelemetrySdk autoConfiguredOpenTelemetrySdk) { try { @@ -30,4 +39,57 @@ public static ConfigProperties getConfig( "Error calling getConfig on AutoConfiguredOpenTelemetrySdk", e); } } + + /** + * Returns the {@link ConfigProvider} resulting from auto-configuration when declarative + * configuration is used. + * + * @return the {@link ConfigProvider}, or {@code null} if declarative configuration is NOT used + */ + @Nullable + public static ConfigProvider getConfigProvider( + AutoConfiguredOpenTelemetrySdk autoConfiguredOpenTelemetrySdk) { + try { + Method method = AutoConfiguredOpenTelemetrySdk.class.getDeclaredMethod("getConfigProvider"); + method.setAccessible(true); + return (ConfigProvider) method.invoke(autoConfiguredOpenTelemetrySdk); + } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { + throw new IllegalStateException( + "Error calling getConfigProvider on AutoConfiguredOpenTelemetrySdk", e); + } + } + + /** Sets the {@link ComponentLoader} to be used in the auto-configuration process. */ + public static AutoConfiguredOpenTelemetrySdkBuilder setComponentLoader( + AutoConfiguredOpenTelemetrySdkBuilder builder, ComponentLoader componentLoader) { + try { + Method method = + AutoConfiguredOpenTelemetrySdkBuilder.class.getDeclaredMethod( + "setComponentLoader", ComponentLoader.class); + method.setAccessible(true); + method.invoke(builder, componentLoader); + return builder; + } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { + throw new IllegalStateException( + "Error calling setComponentLoader on AutoConfiguredOpenTelemetrySdkBuilder", e); + } + } + + /** Sets the {@link ConfigProperties} customizer to be used in the auto-configuration process. 
*/ + public static AutoConfiguredOpenTelemetrySdkBuilder setConfigPropertiesCustomizer( + AutoConfiguredOpenTelemetrySdkBuilder builder, + Function customizer) { + try { + Method method = + AutoConfiguredOpenTelemetrySdkBuilder.class.getDeclaredMethod( + "setConfigPropertiesCustomizer", Function.class); + method.setAccessible(true); + method.invoke(builder, customizer); + return builder; + } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { + throw new IllegalStateException( + "Error calling setConfigPropertiesCustomizer on AutoConfiguredOpenTelemetrySdkBuilder", + e); + } + } } diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/ComponentLoader.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/ComponentLoader.java new file mode 100644 index 00000000000..49bfc4e52f2 --- /dev/null +++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/ComponentLoader.java @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.autoconfigure.internal; + +/** + * A loader for components that are discovered via SPI. + * + *

<p>This class is internal and is hence not for public use. Its APIs are unstable and can change
+ * at any time.
+ */
+public interface ComponentLoader {
+  /**
+   * Load implementations of an SPI.
+   *
+   * @param spiClass the SPI class
+   * @param <T> the SPI type
+   * @return iterable of SPI implementations
+   */
+  <T> Iterable<T> load(Class<T> spiClass);
+}
diff --git a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/SpiHelper.java b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/SpiHelper.java
index c1d9e7eab84..db4ca4ff64d 100644
--- a/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/SpiHelper.java
+++ b/sdk-extensions/autoconfigure/src/main/java/io/opentelemetry/sdk/autoconfigure/internal/SpiHelper.java
@@ -7,12 +7,16 @@
 import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
 import io.opentelemetry.sdk.autoconfigure.spi.Ordered;
+import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.ServiceLoader;
+import java.util.Set;
 import java.util.function.BiFunction;
 import java.util.function.Function;
 import java.util.function.Supplier;
@@ -23,18 +27,32 @@
  */
 public final class SpiHelper {
 
-  private final ClassLoader classLoader;
-  private final SpiFinder spiFinder;
+  private final ComponentLoader componentLoader;
+  private final Set<AutoConfigureListener> listeners =
+      Collections.newSetFromMap(new IdentityHashMap<>());
 
-  // Visible for testing
-  SpiHelper(ClassLoader classLoader, SpiFinder spiFinder) {
-    this.classLoader = classLoader;
-    this.spiFinder = spiFinder;
+  private SpiHelper(ComponentLoader componentLoader) {
+    this.componentLoader = componentLoader;
   }
 
   /** Create a {@link SpiHelper} which loads SPIs using the {@code classLoader}. */
   public static SpiHelper create(ClassLoader classLoader) {
-    return new SpiHelper(classLoader, ServiceLoader::load);
+    return new SpiHelper(serviceComponentLoader(classLoader));
+  }
+
+  /** Create a {@link SpiHelper} which loads SPIs using the {@code componentLoader}. */
+  public static SpiHelper create(ComponentLoader componentLoader) {
+    return new SpiHelper(componentLoader);
+  }
+
+  /** Create a {@link ComponentLoader} which loads using the {@code classLoader}. */
+  public static ComponentLoader serviceComponentLoader(ClassLoader classLoader) {
+    return new ServiceLoaderComponentLoader(classLoader);
+  }
+
+  /** Return the backing underlying {@link ComponentLoader}. */
+  public ComponentLoader getComponentLoader() {
+    return componentLoader;
   }
 
   /**
@@ -57,7 +75,13 @@ public <T, S> NamedSpiManager<T> loadConfigurable(
     Map<String, Supplier<T>> nameToProvider = new HashMap<>();
     for (S provider : load(spiClass)) {
       String name = getName.apply(provider);
-      nameToProvider.put(name, () -> getConfigurable.apply(provider, config));
+      nameToProvider.put(
+          name,
+          () -> {
+            T result = getConfigurable.apply(provider, config);
+            maybeAddListener(result);
+            return result;
+          });
     }
     return NamedSpiManager.create(nameToProvider);
   }
@@ -84,14 +108,34 @@ public <T extends Ordered> List<T> loadOrdered(Class<T> spiClass) {
    */
   public <T> List<T> load(Class<T> spiClass) {
     List<T> result = new ArrayList<>();
-    for (T service : spiFinder.load(spiClass, classLoader)) {
+    for (T service : componentLoader.load(spiClass)) {
+      maybeAddListener(service);
       result.add(service);
     }
     return result;
   }
 
-  // Visible for testing
-  interface SpiFinder {
-    <T> Iterable<T> load(Class<T> spiClass, ClassLoader classLoader);
+  private void maybeAddListener(Object object) {
+    if (object instanceof AutoConfigureListener) {
+      listeners.add((AutoConfigureListener) object);
+    }
+  }
+
+  /** Return the set of SPIs loaded which implement {@link AutoConfigureListener}. */
+  public Set<AutoConfigureListener> getListeners() {
+    return Collections.unmodifiableSet(listeners);
+  }
+
+  private static class ServiceLoaderComponentLoader implements ComponentLoader {
+    private final ClassLoader classLoader;
+
+    private ServiceLoaderComponentLoader(ClassLoader classLoader) {
+      this.classLoader = classLoader;
+    }
+
+    @Override
+    public <T> Iterable<T> load(Class<T> spiClass) {
+      return ServiceLoader.load(spiClass, classLoader);
+    }
   }
 }
diff --git a/sdk-extensions/autoconfigure/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider b/sdk-extensions/autoconfigure/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider
index af0f9919adb..f9bd3554d79 100644
--- a/sdk-extensions/autoconfigure/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider
+++ b/sdk-extensions/autoconfigure/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider
@@ -1 +1 @@
-io.opentelemetry.sdk.autoconfigure.internal.EnvironmentResourceProvider
+io.opentelemetry.sdk.autoconfigure.EnvironmentResourceProvider
diff --git a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkTest.java b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkTest.java
index da0451a7144..c7ae65de9a0 100644
--- a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkTest.java
+++ b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkTest.java
@@ -8,6 +8,7 @@
 import static io.opentelemetry.api.common.AttributeKey.stringKey;
 import static java.util.Collections.singletonMap;
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatCode;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doReturn;
@@ -17,12 +18,12 @@
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
 import static org.mockito.Mockito.when;
 import
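Taken together, the hunks above replace SpiHelper's package-private SpiFinder with the ComponentLoader abstraction: SpiHelper now delegates every SPI lookup to a ComponentLoader and records any loaded component that implements AutoConfigureListener. A minimal sketch of how a caller might plug in its own loader is shown below; everything outside the OpenTelemetry types (the class names, the wiring method) is hypothetical, and ComponentLoader/AutoConfigureUtil are internal APIs that may change at any time. The builder_addAutoConfigurationCustomizerProviderUsingComponentLoader test in the following hunk exercises the same hook with a mock provider.

// Hypothetical sketch; names outside the OpenTelemetry APIs are invented for illustration.
package com.example;

import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;
import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdkBuilder;
import io.opentelemetry.sdk.autoconfigure.internal.AutoConfigureUtil;
import io.opentelemetry.sdk.autoconfigure.internal.ComponentLoader;
import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider;
import java.util.Collections;
import java.util.ServiceLoader;

public final class CustomLoaderExample {

  // A loader that serves one AutoConfigurationCustomizerProvider instance directly and
  // defers every other SPI lookup to the regular ServiceLoader mechanism.
  static final class FixedProviderComponentLoader implements ComponentLoader {
    private final AutoConfigurationCustomizerProvider provider;

    FixedProviderComponentLoader(AutoConfigurationCustomizerProvider provider) {
      this.provider = provider;
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> Iterable<T> load(Class<T> spiClass) {
      if (spiClass.equals(AutoConfigurationCustomizerProvider.class)) {
        return Collections.singletonList((T) provider);
      }
      return ServiceLoader.load(spiClass, getClass().getClassLoader());
    }
  }

  public static AutoConfiguredOpenTelemetrySdk configure(
      AutoConfigurationCustomizerProvider provider) {
    AutoConfiguredOpenTelemetrySdkBuilder builder = AutoConfiguredOpenTelemetrySdk.builder();
    // Route SPI discovery through the custom loader before building the SDK.
    AutoConfigureUtil.setComponentLoader(builder, new FixedProviderComponentLoader(provider));
    return builder.build();
  }

  private CustomLoaderExample() {}
}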
io.github.netmikey.logunit.api.LogCapturer; import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.events.GlobalEventEmitterProvider; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanId; import io.opentelemetry.api.trace.TraceId; @@ -33,13 +34,18 @@ import io.opentelemetry.context.propagation.TextMapPropagator; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.autoconfigure.internal.AutoConfigureUtil; +import io.opentelemetry.sdk.autoconfigure.internal.ComponentLoader; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener; +import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.logs.LogRecordProcessor; import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder; -import io.opentelemetry.sdk.logs.internal.SdkEventEmitterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; import io.opentelemetry.sdk.metrics.export.MetricReader; @@ -50,7 +56,6 @@ import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder; import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; -import io.opentelemetry.sdk.trace.export.SpanExporter; import io.opentelemetry.sdk.trace.samplers.Sampler; import java.io.IOException; import java.math.BigDecimal; @@ -62,6 +67,7 @@ import java.util.Properties; import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; +import java.util.function.Consumer; import java.util.function.Supplier; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -87,8 +93,6 @@ class AutoConfiguredOpenTelemetrySdkTest { @Mock private TextMapGetter> getter; @Mock private Sampler sampler1; @Mock private Sampler sampler2; - @Mock private SpanExporter spanExporter1; - @Mock private SpanExporter spanExporter2; @Mock private MetricReader metricReader; @Mock private LogRecordProcessor logRecordProcessor; @@ -143,7 +147,6 @@ public SdkLoggerProviderBuilder apply( @BeforeEach void resetGlobal() { GlobalOpenTelemetry.resetForTest(); - GlobalEventEmitterProvider.resetForTest(); builder = AutoConfiguredOpenTelemetrySdk.builder() .addPropertiesSupplier(disableExportPropertySupplier()); @@ -238,31 +241,29 @@ void builder_addSamplerCustomizer() { } @Test - void builder_addSpanExporterCustomizer() { - Mockito.lenient().when(spanExporter2.shutdown()).thenReturn(CompletableResultCode.ofSuccess()); + void builder_addAutoConfigurationCustomizerProviderUsingComponentLoader() { + AutoConfigurationCustomizerProvider customizerProvider = + mock(AutoConfigurationCustomizerProvider.class); - SdkTracerProvider sdkTracerProvider = - builder - .addSpanExporterCustomizer( - (previous, config) -> { - assertThat(previous).isSameAs(SpanExporter.composite()); - return spanExporter1; - }) - .addSpanExporterCustomizer( - (previous, config) -> { - assertThat(previous).isSameAs(spanExporter1); - return spanExporter2; - }) - .build() - 
.getOpenTelemetrySdk() - .getSdkTracerProvider(); + SpiHelper spiHelper = + SpiHelper.create(AutoConfiguredOpenTelemetrySdkBuilder.class.getClassLoader()); - assertThat(sdkTracerProvider) - .extracting("sharedState") - .extracting("activeSpanProcessor") - .extracting("worker") - .extracting("spanExporter") - .isEqualTo(spanExporter2); + AutoConfigureUtil.setComponentLoader( + builder, + new ComponentLoader() { + @SuppressWarnings("unchecked") + @Override + public Iterable load(Class spiClass) { + if (spiClass.equals(AutoConfigurationCustomizerProvider.class)) { + return Collections.singletonList((T) customizerProvider); + } + return spiHelper.load(spiClass); + } + }) + .build(); + + verify(customizerProvider).customize(any()); + verifyNoMoreInteractions(customizerProvider); } @Test @@ -306,6 +307,23 @@ void builder_addPropertiesCustomizer() { assertThat(autoConfigured.getConfig().getString("some.key")).isEqualTo("override-2"); } + @Test + void builder_setConfigPropertiesCustomizer() { + AutoConfiguredOpenTelemetrySdk autoConfigured = + AutoConfigureUtil.setConfigPropertiesCustomizer( + builder.addPropertiesCustomizer(config -> singletonMap("some-key", "defaultValue")), + config -> { + assertThat(config.getString("some-key")).isEqualTo("defaultValue"); + + Map map = new HashMap<>(singletonMap("some-key", "override")); + map.putAll(disableExportPropertySupplier().get()); + return DefaultConfigProperties.createFromMap(map); + }) + .build(); + + assertThat(autoConfigured.getConfig().getString("some.key")).isEqualTo("override"); + } + @Test void builder_addMeterProviderCustomizer() { Mockito.lenient().when(metricReader.shutdown()).thenReturn(CompletableResultCode.ofSuccess()); @@ -325,8 +343,6 @@ void builder_addMeterProviderCustomizer() { verify(metricReader).forceFlush(); } - // TODO: add test for addMetricExporterCustomizer once OTLP export is enabled by default - @Test void builder_addLoggerProviderCustomizer() { Mockito.lenient() @@ -347,8 +363,6 @@ void builder_addLoggerProviderCustomizer() { verify(logRecordProcessor).forceFlush(); } - // TODO: add test for addLogRecordExporterCustomizer once OTLP export is enabled by default - @Test void builder_setResultAsGlobalFalse() { GlobalOpenTelemetry.set(OpenTelemetry.noop()); @@ -356,7 +370,6 @@ void builder_setResultAsGlobalFalse() { OpenTelemetrySdk openTelemetry = builder.build().getOpenTelemetrySdk(); assertThat(GlobalOpenTelemetry.get()).extracting("delegate").isNotSameAs(openTelemetry); - assertThat(GlobalEventEmitterProvider.get()).isNotSameAs(openTelemetry.getSdkLoggerProvider()); } @Test @@ -364,10 +377,6 @@ void builder_setResultAsGlobalTrue() { OpenTelemetrySdk openTelemetry = builder.setResultAsGlobal().build().getOpenTelemetrySdk(); assertThat(GlobalOpenTelemetry.get()).extracting("delegate").isSameAs(openTelemetry); - assertThat(GlobalEventEmitterProvider.get()) - .isInstanceOf(SdkEventEmitterProvider.class) - .extracting("delegateLoggerProvider") - .isSameAs(openTelemetry.getSdkLoggerProvider()); } @Test @@ -383,7 +392,15 @@ void builder_registersShutdownHook() { } @Test - void shutdownHook() throws InterruptedException { + void builder_customizes() { + builder = spy(builder); + OpenTelemetrySdk sdk = builder.build().getOpenTelemetrySdk(); + assertThat(sdk).isNotNull(); + verify(builder, times(1)).mergeSdkTracerProviderConfigurer(); + } + + @Test + void builder_shutdownHook() throws InterruptedException { OpenTelemetrySdk sdk = mock(OpenTelemetrySdk.class); Thread thread = builder.shutdownHook(sdk); @@ -393,6 +410,27 @@ void 
shutdownHook() throws InterruptedException { verify(sdk).close(); } + @Test + void builder_CallAutoConfigureListeners() { + builder = spy(builder); + + assertThatCode(() -> builder.build()).doesNotThrowAnyException(); + + verify(builder, times(1)).callAutoConfigureListeners(any(), any()); + } + + @Test + void builder_callAutoConfigureListeners() { + AutoConfigureListener listener = mock(AutoConfigureListener.class); + SpiHelper spiHelper = mock(SpiHelper.class); + when(spiHelper.getListeners()).thenReturn(Collections.singleton(listener)); + OpenTelemetrySdk sdk = mock(OpenTelemetrySdk.class); + + builder.callAutoConfigureListeners(spiHelper, sdk); + + verify(listener).afterAutoConfigure(sdk); + } + private static Supplier> disableExportPropertySupplier() { Map props = new HashMap<>(); props.put("otel.metrics.exporter", "none"); @@ -426,6 +464,39 @@ void disableSdk() { verify(logCustomizer, never()).apply(any(), any()); } + @Test + void disableSdk_PropagatorCustomizer() { + Context extracted = Context.root().with(ContextKey.named("animal"), "bear"); + + when(propagator2.extract(any(), any(), any())).thenReturn(extracted); + + AutoConfiguredOpenTelemetrySdk autoConfiguredSdk = + AutoConfiguredOpenTelemetrySdk.builder() + .addPropertiesSupplier(() -> singletonMap("otel.sdk.disabled", "true")) + .addPropertiesSupplier(() -> singletonMap("otel.propagators", "tracecontext")) + .addPropagatorCustomizer( + (previous, config) -> { + assertThat(previous).isSameAs(W3CTraceContextPropagator.getInstance()); + return propagator1; + }) + .addPropagatorCustomizer( + (previous, config) -> { + assertThat(previous).isSameAs(propagator1); + return propagator2; + }) + .build(); + + // When the SDK is disabled, propagators are still configured + assertThat(autoConfiguredSdk.getOpenTelemetrySdk().getPropagators()).isNotNull(); + Consumer propagatorConsumer = + propagator -> { + assertThat(propagator.extract(Context.root(), Collections.emptyMap(), getter)) + .isEqualTo(extracted); + }; + assertThat(autoConfiguredSdk.getOpenTelemetrySdk().getPropagators().getTextMapPropagator()) + .isInstanceOfSatisfying(TextMapPropagator.class, propagatorConsumer); + } + @Test void tracerProviderCustomizer() { InMemorySpanExporter spanExporter = InMemorySpanExporter.create(); @@ -481,6 +552,88 @@ void testNonStringProperties() { }); } + @Test + @SuppressLogger(AutoConfiguredOpenTelemetrySdkBuilder.class) + void configurationError_propagators() { + BiFunction + traceCustomizer = getTracerProviderBuilderSpy(); + BiFunction + metricCustomizer = getMeterProviderBuilderSpy(); + BiFunction logCustomizer = + getLoggerProviderBuilderSpy(); + + assertThatThrownBy( + () -> + // Override the provider builders with mocks which we can verify are closed + AutoConfiguredOpenTelemetrySdk.builder() + .addTracerProviderCustomizer(traceCustomizer) + .addMeterProviderCustomizer(metricCustomizer) + .addLoggerProviderCustomizer(logCustomizer) + .addPropertiesSupplier(() -> singletonMap("otel.metrics.exporter", "none")) + .addPropertiesSupplier(() -> singletonMap("otel.traces.exporter", "none")) + .addPropertiesSupplier(() -> singletonMap("otel.logs.exporter", "none")) + .addPropertiesSupplier(() -> singletonMap("otel.propagators", "foo")) + .addPropertiesSupplier(() -> singletonMap("otel.sdk.disabled", "true")) + .build()) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining("Unrecognized value for otel.propagators"); + + // When the SDK is disabled and propagators are mis-configured, none of the customizers are + // called + 
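The two listener tests above cover the new AutoConfigureListener hook: any SPI component loaded through SpiHelper that implements the interface is handed the fully configured OpenTelemetrySdk once auto-configuration completes. A hypothetical provider using the hook might look like the sketch below; the provider name and its behavior are invented, while the OTLP exporter providers in this change use the same mechanism, as the full-config test later in this diff verifies.

// Hypothetical provider for illustration only; AutoConfigureListener is an internal SPI
// and may change at any time.
package com.example;

import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener;
import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider;
import io.opentelemetry.sdk.trace.export.SpanExporter;

public final class ListeningSpanExporterProvider
    implements ConfigurableSpanExporterProvider, AutoConfigureListener {

  @Override
  public SpanExporter createExporter(ConfigProperties config) {
    // Placeholder exporter; a real provider would build its own exporter here.
    return SpanExporter.composite();
  }

  @Override
  public String getName() {
    return "listening-exporter";
  }

  @Override
  public void afterAutoConfigure(OpenTelemetrySdk sdk) {
    // Called once the SDK is fully assembled, e.g. to obtain a MeterProvider for
    // exporter self-metrics, which is what the OTLP exporter providers do.
    System.out.println("configured tracer provider: " + sdk.getSdkTracerProvider());
  }
}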
verify(traceCustomizer, never()).apply(any(), any()); + verify(metricCustomizer, never()).apply(any(), any()); + verify(logCustomizer, never()).apply(any(), any()); + + assertThatThrownBy( + () -> + // Override the provider builders with mocks which we can verify are closed + AutoConfiguredOpenTelemetrySdk.builder() + .addTracerProviderCustomizer(traceCustomizer) + .addMeterProviderCustomizer(metricCustomizer) + .addLoggerProviderCustomizer(logCustomizer) + .addPropertiesSupplier(() -> singletonMap("otel.metrics.exporter", "none")) + .addPropertiesSupplier(() -> singletonMap("otel.traces.exporter", "none")) + .addPropertiesSupplier(() -> singletonMap("otel.logs.exporter", "none")) + .addPropertiesSupplier(() -> singletonMap("otel.propagators", "foo")) + .addPropertiesSupplier(() -> singletonMap("otel.sdk.disabled", "false")) + .build()) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining("Unrecognized value for otel.propagators"); + + // When the SDK is enabled and propagators are mis-configured, none of the customizers are + // called + verify(traceCustomizer, never()).apply(any(), any()); + verify(metricCustomizer, never()).apply(any(), any()); + verify(logCustomizer, never()).apply(any(), any()); + } + + @Test + @SuppressLogger(AutoConfiguredOpenTelemetrySdkBuilder.class) + void configurationError_runtime() { + BiFunction + traceCustomizer = getTracerProviderBuilderSpy(); + BiFunction + metricCustomizer = getMeterProviderBuilderSpy(); + BiFunction logCustomizer = + getLoggerProviderBuilderSpy(); + + doThrow(new RuntimeException()).when(traceCustomizer).apply(any(), any()); + + assertThatThrownBy( + () -> + AutoConfiguredOpenTelemetrySdk.builder() + .addTracerProviderCustomizer(traceCustomizer) + .addMeterProviderCustomizer(metricCustomizer) + .addLoggerProviderCustomizer(logCustomizer) + .addPropertiesSupplier(() -> singletonMap("otel.metrics.exporter", "none")) + .addPropertiesSupplier(() -> singletonMap("otel.traces.exporter", "none")) + .addPropertiesSupplier(() -> singletonMap("otel.logs.exporter", "none")) + .addPropertiesSupplier(() -> singletonMap("otel.sdk.disabled", "false")) + .build()) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining("Unexpected configuration error"); + } + @Test @SuppressLogger(AutoConfiguredOpenTelemetrySdkBuilder.class) void configurationError_ClosesResources() { @@ -510,15 +663,14 @@ void configurationError_ClosesResources() { .addLoggerProviderCustomizer((u1, u2) -> loggerProviderBuilder) .addPropertiesSupplier(() -> singletonMap("otel.metrics.exporter", "none")) .addPropertiesSupplier(() -> singletonMap("otel.traces.exporter", "none")) - .addPropertiesSupplier(() -> singletonMap("otel.logs.exporter", "none")) - .addPropertiesSupplier(() -> singletonMap("otel.propagators", "foo")) + .addPropertiesSupplier(() -> singletonMap("otel.logs.exporter", "foo")) + .addPropertiesSupplier(() -> singletonMap("otel.sdk.disabled", "false")) .build()) .isInstanceOf(ConfigurationException.class) - .hasMessageContaining("Unrecognized value for otel.propagators"); + .hasMessageContaining("Unrecognized value for otel.logs.exporter: foo"); verify(tracerProvider).close(); verify(meterProvider).close(); - verify(loggerProvider).close(); logs.assertContains("Error closing io.opentelemetry.sdk.trace.SdkTracerProvider: Error!"); } diff --git a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/DeclarativeConfigurationTest.java 
b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/DeclarativeConfigurationTest.java new file mode 100644 index 00000000000..8dec1132b5c --- /dev/null +++ b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/DeclarativeConfigurationTest.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.autoconfigure; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +class DeclarativeConfigurationTest { + + @Test + void configFile(@TempDir Path tempDir) throws IOException { + String yaml = + "file_format: \"0.3\"\n" + + "resource:\n" + + " attributes:\n" + + " - name: service.name\n" + + " value: test\n" + + "tracer_provider:\n" + + " processors:\n" + + " - simple:\n" + + " exporter:\n" + + " console: {}\n"; + Path path = tempDir.resolve("otel-config.yaml"); + Files.write(path, yaml.getBytes(StandardCharsets.UTF_8)); + ConfigProperties config = + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.experimental.config.file", path.toString())); + + assertThatThrownBy(() -> AutoConfiguredOpenTelemetrySdk.builder().setConfig(config).build()) + .isInstanceOf(ConfigurationException.class) + .hasMessage( + "Cannot autoconfigure from config file without opentelemetry-sdk-extension-incubator on the classpath"); + } +} diff --git a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfigurationTest.java b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfigurationTest.java index 637cb5e80fe..b8577cebf41 100644 --- a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfigurationTest.java @@ -46,6 +46,11 @@ void configureExporter_KnownSpiExportersNotOnClasspath() { .hasMessage( "otel.logs.exporter set to \"logging-otlp\" but opentelemetry-exporter-logging-otlp" + " not found on classpath. Make sure to add it as a dependency."); + assertThatThrownBy(() -> configureExporter("experimental-otlp/stdout", spiExportersManager)) + .isInstanceOf(ConfigurationException.class) + .hasMessage( + "otel.logs.exporter set to \"experimental-otlp/stdout\" but opentelemetry-exporter-logging-otlp" + + " not found on classpath. 
Make sure to add it as a dependency."); assertThatThrownBy(() -> configureExporter("otlp", spiExportersManager)) .isInstanceOf(ConfigurationException.class) .hasMessage( diff --git a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfigurationTest.java b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfigurationTest.java index 3bf772665b8..f28bb44f3fd 100644 --- a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfigurationTest.java @@ -48,6 +48,7 @@ void configureLoggerProvider() { SpiHelper.create(LoggerProviderConfiguration.class.getClassLoader()), MeterProvider.noop(), (a, unused) -> a, + (a, unused) -> a, closeables); cleanup.addCloseables(closeables); diff --git a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfigurationTest.java b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfigurationTest.java index c501d28e3cb..900ba509d71 100644 --- a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfigurationTest.java @@ -54,6 +54,7 @@ private static ObjectAssert assertExemplarFilter(Map a, + (a, b) -> a, new ArrayList<>()); return assertThat(builder) .extracting("exemplarFilter", as(InstanceOfAssertFactories.type(ExemplarFilter.class))); diff --git a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/ResourceConfigurationTest.java b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/ResourceConfigurationTest.java index e5f34035c69..e2c94a68aa8 100644 --- a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/ResourceConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/ResourceConfigurationTest.java @@ -26,12 +26,12 @@ class ResourceConfigurationTest { @Test - void customConfigResource() { + void customConfigResourceWithDisabledKeys() { Map props = new HashMap<>(); props.put("otel.service.name", "test-service"); props.put( "otel.resource.attributes", "food=cheesecake,drink=juice,animal= ,color=,shape=square"); - props.put("otel.experimental.resource.disabled-keys", "drink"); + props.put("otel.resource.disabled-keys", "drink"); assertThat( ResourceConfiguration.configureResource( @@ -43,7 +43,6 @@ void customConfigResource() { .put(stringKey("service.name"), "test-service") .put("food", "cheesecake") .put("shape", "square") - .setSchemaUrl("https://opentelemetry.io/schemas/1.21.0") .build()); } diff --git a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/TracerProviderConfigurationTest.java b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/TracerProviderConfigurationTest.java index 3ce674d37e5..e0ddcb438fe 100644 --- a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/TracerProviderConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/TracerProviderConfigurationTest.java @@ -79,6 +79,7 @@ void configureTracerProvider() { MeterProvider.noop(), (a, unused) -> a, (a, unused) -> a, + (a, unused) -> a, closeables); try 
(SdkTracerProvider tracerProvider = tracerProviderBuilder.build()) { @@ -125,7 +126,7 @@ void configureBatchSpanProcessor_configured() { Map properties = new HashMap<>(); properties.put("otel.bsp.schedule.delay", "100000"); properties.put("otel.bsp.max.queue.size", "2"); - properties.put("otel.bsp.max.export.batch.size", "3"); + properties.put("otel.bsp.max.export.batch.size", "2"); properties.put("otel.bsp.export.timeout", "4"); try (BatchSpanProcessor processor = @@ -143,7 +144,7 @@ void configureBatchSpanProcessor_configured() { assertThat(worker) .extracting("exporterTimeoutNanos") .isEqualTo(TimeUnit.MILLISECONDS.toNanos(4)); - assertThat(worker).extracting("maxExportBatchSize").isEqualTo(3); + assertThat(worker).extracting("maxExportBatchSize").isEqualTo(2); assertThat(worker) .extracting("queue") .isInstanceOfSatisfying( diff --git a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/internal/SpiHelperTest.java b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/internal/SpiHelperTest.java index a2d5c870d4b..ad7a96704e6 100644 --- a/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/internal/SpiHelperTest.java +++ b/sdk-extensions/autoconfigure/src/test/java/io/opentelemetry/sdk/autoconfigure/internal/SpiHelperTest.java @@ -11,6 +11,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -28,11 +29,11 @@ public class SpiHelperTest { @Test public void canRetrieveByName() { - SpiHelper.SpiFinder mockFinder = mock(SpiHelper.SpiFinder.class); - when(mockFinder.load(any(), any())) + ComponentLoader mockLoader = spy(ComponentLoader.class); + when(mockLoader.load(any())) .thenReturn(Collections.singletonList(new SpiExampleProviderImplementation())); - SpiHelper spiHelper = new SpiHelper(SpiHelperTest.class.getClassLoader(), mockFinder); + SpiHelper spiHelper = SpiHelper.create(mockLoader); NamedSpiManager spiProvider = spiHelper.loadConfigurable( @@ -49,10 +50,10 @@ public void canRetrieveByName() { public void instantiatesImplementationsLazily() { SpiExampleProvider mockProvider = mock(SpiExampleProvider.class); when(mockProvider.getName()).thenReturn("lazy-init-example"); - SpiHelper.SpiFinder mockFinder = mock(SpiHelper.SpiFinder.class); - when(mockFinder.load(any(), any())).thenReturn(Collections.singletonList(mockProvider)); + ComponentLoader mockLoader = spy(ComponentLoader.class); + when(mockLoader.load(any())).thenReturn(Collections.singletonList(mockProvider)); - SpiHelper spiHelper = new SpiHelper(SpiHelperTest.class.getClassLoader(), mockFinder); + SpiHelper spiHelper = SpiHelper.create(mockLoader); NamedSpiManager spiProvider = spiHelper.loadConfigurable( @@ -68,11 +69,11 @@ public void instantiatesImplementationsLazily() { @Test public void onlyInstantiatesOnce() { - SpiHelper.SpiFinder mockFinder = mock(SpiHelper.SpiFinder.class); - when(mockFinder.load(any(), any())) + ComponentLoader mockLoader = mock(ComponentLoader.class); + when(mockLoader.load(any())) .thenReturn(Collections.singletonList(new SpiExampleProviderImplementation())); - SpiHelper spiHelper = new SpiHelper(SpiHelperTest.class.getClassLoader(), mockFinder); + SpiHelper spiHelper = SpiHelper.create(mockLoader); NamedSpiManager spiProvider = spiHelper.loadConfigurable( @@ -93,10 +94,10 @@ public void failureToInitializeThrows() { 
when(mockProvider.getName()).thenReturn("init-failure-example"); when(mockProvider.createSpiExample(any())).thenThrow(new RuntimeException()); - SpiHelper.SpiFinder mockFinder = mock(SpiHelper.SpiFinder.class); - when(mockFinder.load(any(), any())).thenReturn(Collections.singletonList(mockProvider)); + ComponentLoader mockLoader = spy(ComponentLoader.class); + when(mockLoader.load(any())).thenReturn(Collections.singletonList(mockProvider)); - SpiHelper spiHelper = new SpiHelper(SpiHelperTest.class.getClassLoader(), mockFinder); + SpiHelper spiHelper = SpiHelper.create(mockLoader); NamedSpiManager spiProvider = spiHelper.loadConfigurable( @@ -120,11 +121,10 @@ void loadsOrderedSpi() { when(spi2.order()).thenReturn(0); when(spi3.order()).thenReturn(1); - SpiHelper.SpiFinder mockFinder = mock(SpiHelper.SpiFinder.class); - when(mockFinder.load(ResourceProvider.class, SpiHelper.class.getClassLoader())) - .thenReturn(asList(spi1, spi2, spi3)); + ComponentLoader mockLoader = spy(ComponentLoader.class); + when(mockLoader.load(ResourceProvider.class)).thenReturn(asList(spi1, spi2, spi3)); - SpiHelper spiHelper = new SpiHelper(SpiHelperTest.class.getClassLoader(), mockFinder); + SpiHelper spiHelper = SpiHelper.create(mockLoader); List loadedSpi = spiHelper.loadOrdered(ResourceProvider.class); diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkTest.java index 6bf5c74ccc5..f630b814cb7 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/AutoConfiguredOpenTelemetrySdkTest.java @@ -5,12 +5,15 @@ package io.opentelemetry.sdk.autoconfigure; +import static java.util.Collections.singletonMap; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; +import com.linecorp.armeria.client.WebClient; import io.github.netmikey.logunit.api.LogCapturer; import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.events.GlobalEventEmitterProvider; +import io.opentelemetry.exporter.prometheus.PrometheusHttpServer; import io.opentelemetry.sdk.OpenTelemetrySdk; import java.lang.reflect.Field; import org.junit.jupiter.api.BeforeEach; @@ -28,7 +31,25 @@ class AutoConfiguredOpenTelemetrySdkTest { @BeforeEach void setUp() { GlobalOpenTelemetry.resetForTest(); - GlobalEventEmitterProvider.resetForTest(); + } + + @SuppressWarnings("ResultOfMethodCallIgnored") + @Test + void build_addMetricReaderCustomizerPrometheus() { + AutoConfiguredOpenTelemetrySdkBuilder builder = AutoConfiguredOpenTelemetrySdk.builder(); + builder.addPropertiesSupplier(() -> singletonMap("otel.metrics.exporter", "prometheus")); + + int port = FreePortFinder.getFreePort(); + builder.addMetricReaderCustomizer( + (reader, config) -> { + assertThat(reader).isInstanceOf(PrometheusHttpServer.class); + return PrometheusHttpServer.builder().setPort(port).build(); + }); + + try (OpenTelemetrySdk ignored = builder.build().getOpenTelemetrySdk()) { + WebClient client = WebClient.builder("http://localhost:" + port).build(); + assertThatCode(() -> client.get("/metrics")).doesNotThrowAnyException(); + } } @Test diff --git 
a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableLogRecordExporterTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableLogRecordExporterTest.java index 315925be7f0..7025de0084e 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableLogRecordExporterTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableLogRecordExporterTest.java @@ -9,6 +9,7 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.google.common.collect.ImmutableMap; +import io.opentelemetry.exporter.otlp.internal.OtlpLogRecordExporterProvider; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.NamedSpiManager; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; @@ -37,12 +38,10 @@ void configureLogRecordExporters_spiExporter() { ImmutableMap.of("test.option", "true", "otel.logs.exporter", "testExporter")); List closeables = new ArrayList<>(); + SpiHelper spiHelper = SpiHelper.create(LogRecordExporterConfiguration.class.getClassLoader()); Map exportersByName = LogRecordExporterConfiguration.configureLogRecordExporters( - config, - SpiHelper.create(LogRecordExporterConfiguration.class.getClassLoader()), - (a, unused) -> a, - closeables); + config, spiHelper, (a, unused) -> a, closeables); cleanup.addCloseables(closeables); assertThat(exportersByName) @@ -55,6 +54,11 @@ void configureLogRecordExporters_spiExporter() { assertThat(closeables) .hasExactlyElementsOfTypes( TestConfigurableLogRecordExporterProvider.TestLogRecordExporter.class); + assertThat(spiHelper.getListeners()) + .satisfiesExactlyInAnyOrder( + listener -> + assertThat(listener).isInstanceOf(TestConfigurableLogRecordExporterProvider.class), + listener -> assertThat(listener).isInstanceOf(OtlpLogRecordExporterProvider.class)); } @Test @@ -64,17 +68,16 @@ void configureLogRecordExporters_emptyClassLoader() { ImmutableMap.of("test.option", "true", "otel.logs.exporter", "testExporter")); List closeables = new ArrayList<>(); + SpiHelper spiHelper = SpiHelper.create(new URLClassLoader(new URL[0], null)); assertThatThrownBy( () -> LogRecordExporterConfiguration.configureLogRecordExporters( - config, - SpiHelper.create(new URLClassLoader(new URL[0], null)), - (a, unused) -> a, - closeables)) + config, spiHelper, (a, unused) -> a, closeables)) .isInstanceOf(ConfigurationException.class) .hasMessageContaining("testExporter"); cleanup.addCloseables(closeables); assertThat(closeables).isEmpty(); + assertThat(spiHelper.getListeners()).isEmpty(); } @Test diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableMetricExporterTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableMetricExporterTest.java index ba18d0c89c3..bc32ebc84a4 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableMetricExporterTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableMetricExporterTest.java @@ -49,6 +49,10 @@ void configureExporter_spiExporter() { .isInstanceOf(TestConfigurableMetricExporterProvider.TestMetricExporter.class) .extracting("config") .isSameAs(config); + assertThat(spiHelper.getListeners()) + .satisfiesExactlyInAnyOrder( + listener 
-> + assertThat(listener).isInstanceOf(TestConfigurableMetricExporterProvider.class)); } } @@ -83,11 +87,12 @@ void configureMetricReaders_multipleWithNone() { assertThatThrownBy( () -> MeterProviderConfiguration.configureMetricReaders( - config, spiHelper, (a, unused) -> a, closeables)) + config, spiHelper, (a, unused) -> a, (a, unused) -> a, closeables)) .isInstanceOf(ConfigurationException.class) .hasMessageContaining("otel.metrics.exporter contains none along with other exporters"); cleanup.addCloseables(closeables); assertThat(closeables).isEmpty(); + assertThat(spiHelper.getListeners()).isEmpty(); } @Test @@ -97,7 +102,11 @@ void configureMetricReaders_defaultExporter() { List metricReaders = MeterProviderConfiguration.configureMetricReaders( - config, spiHelper, (metricExporter, unused) -> metricExporter, closeables); + config, + spiHelper, + (a, unused) -> a, + (metricExporter, unused) -> metricExporter, + closeables); cleanup.addCloseables(closeables); assertThat(metricReaders) @@ -120,7 +129,11 @@ void configureMetricReaders_multipleExporters() { List metricReaders = MeterProviderConfiguration.configureMetricReaders( - config, spiHelper, (metricExporter, unused) -> metricExporter, closeables); + config, + spiHelper, + (a, unused) -> a, + (metricExporter, unused) -> metricExporter, + closeables); cleanup.addCloseables(closeables); assertThat(metricReaders).hasSize(2).hasOnlyElementsOfType(PeriodicMetricReader.class); diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableSpanExporterTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableSpanExporterTest.java index 89f23078da3..d2c66386092 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableSpanExporterTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ConfigurableSpanExporterTest.java @@ -11,6 +11,7 @@ import com.google.common.collect.ImmutableMap; import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.exporter.logging.LoggingSpanExporter; +import io.opentelemetry.exporter.otlp.internal.OtlpSpanExporterProvider; import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.exporter.zipkin.ZipkinSpanExporter; import io.opentelemetry.internal.testing.CleanupExtension; @@ -29,6 +30,7 @@ import java.net.URLClassLoader; import java.util.ArrayList; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.assertj.core.api.InstanceOfAssertFactories; @@ -63,6 +65,11 @@ void configureSpanExporters_spiExporter() { .isSameAs(config); assertThat(closeables) .hasExactlyElementsOfTypes(TestConfigurableSpanExporterProvider.TestSpanExporter.class); + assertThat(spiHelper.getListeners()) + .satisfiesExactlyInAnyOrder( + listener -> + assertThat(listener).isInstanceOf(TestConfigurableSpanExporterProvider.class), + listener -> assertThat(listener).isInstanceOf(OtlpSpanExporterProvider.class)); } @Test @@ -83,6 +90,7 @@ void configureSpanExporters_emptyClassLoader() { .hasMessageContaining("testExporter"); cleanup.addCloseables(closeables); assertThat(closeables).isEmpty(); + assertThat(spiHelper.getListeners()).isEmpty(); } @Test @@ -130,20 +138,25 @@ void configureExporter_NotFound() { @Test void configureSpanProcessors_simpleSpanProcessor() { - String exporterName = "logging"; List closeables = new ArrayList<>(); + 
Map exportersByName = new LinkedHashMap<>(); + exportersByName.put("console", LoggingSpanExporter.create()); + exportersByName.put("logging", LoggingSpanExporter.create()); + List spanProcessors = TracerProviderConfiguration.configureSpanProcessors( DefaultConfigProperties.createFromMap( - Collections.singletonMap("otel.traces.exporter", exporterName)), - ImmutableMap.of(exporterName, LoggingSpanExporter.create()), + Collections.singletonMap("otel.traces.exporter", "console,logging")), + exportersByName, MeterProvider.noop(), closeables); cleanup.addCloseables(closeables); - assertThat(spanProcessors).hasExactlyElementsOfTypes(SimpleSpanProcessor.class); - assertThat(closeables).hasExactlyElementsOfTypes(SimpleSpanProcessor.class); + assertThat(spanProcessors) + .hasExactlyElementsOfTypes(SimpleSpanProcessor.class, SimpleSpanProcessor.class); + assertThat(closeables) + .hasExactlyElementsOfTypes(SimpleSpanProcessor.class, SimpleSpanProcessor.class); } @Test diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/FreePortFinder.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/FreePortFinder.java new file mode 100644 index 00000000000..92c2b7640c7 --- /dev/null +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/FreePortFinder.java @@ -0,0 +1,22 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.autoconfigure; + +import java.io.IOException; +import java.net.ServerSocket; + +final class FreePortFinder { + + static int getFreePort() { + try (ServerSocket socket = new ServerSocket(0)) { + return socket.getLocalPort(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private FreePortFinder() {} +} diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/FullConfigTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/FullConfigTest.java index 7a1a1f705f5..0a0a905ae8d 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/FullConfigTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/FullConfigTest.java @@ -18,8 +18,6 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.events.EventEmitter; -import io.opentelemetry.api.events.GlobalEventEmitterProvider; import io.opentelemetry.api.logs.Logger; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.metrics.Meter; @@ -39,8 +37,6 @@ import io.opentelemetry.proto.common.v1.AnyValue; import io.opentelemetry.proto.common.v1.KeyValue; import io.opentelemetry.proto.metrics.v1.Metric; -import io.opentelemetry.proto.metrics.v1.ResourceMetrics; -import io.opentelemetry.proto.metrics.v1.ScopeMetrics; import io.opentelemetry.sdk.OpenTelemetrySdk; import java.util.ArrayList; import java.util.Collection; @@ -54,7 +50,7 @@ import org.junit.jupiter.api.extension.RegisterExtension; @SuppressWarnings("InterruptedExceptionSwallowed") -class FullConfigTest { +public class FullConfigTest { private static final BlockingQueue otlpTraceRequests = new LinkedBlockingDeque<>(); @@ -161,7 +157,6 @@ void setUp() { // Initialize here so we can shutdown when done GlobalOpenTelemetry.resetForTest(); - 
GlobalEventEmitterProvider.resetForTest(); openTelemetrySdk = AutoConfiguredOpenTelemetrySdk.initialize().getOpenTelemetrySdk(); } @@ -169,7 +164,6 @@ void setUp() { void afterEach() { openTelemetrySdk.close(); GlobalOpenTelemetry.resetForTest(); - GlobalEventEmitterProvider.resetForTest(); } @Test @@ -194,7 +188,6 @@ void configures() throws Exception { .spanBuilder("test") .startSpan() .setAttribute("cat", "meow") - .setAttribute("dog", "bark") .end(); Meter meter = GlobalOpenTelemetry.get().getMeter("test"); @@ -208,125 +201,120 @@ void configures() throws Exception { logger.logRecordBuilder().setBody("debug log message").setSeverity(Severity.DEBUG).emit(); logger.logRecordBuilder().setBody("info log message").setSeverity(Severity.INFO).emit(); - EventEmitter eventEmitter = - GlobalEventEmitterProvider.get() - .eventEmitterBuilder("test") - .setEventDomain("test-domain") - .build(); - eventEmitter.emit("test-name", Attributes.builder().put("cow", "moo").build()); - openTelemetrySdk.getSdkTracerProvider().forceFlush().join(10, TimeUnit.SECONDS); - openTelemetrySdk.getSdkMeterProvider().forceFlush().join(10, TimeUnit.SECONDS); openTelemetrySdk.getSdkLoggerProvider().forceFlush().join(10, TimeUnit.SECONDS); + openTelemetrySdk.getSdkMeterProvider().forceFlush().join(10, TimeUnit.SECONDS); await().untilAsserted(() -> assertThat(otlpTraceRequests).hasSize(1)); ExportTraceServiceRequest traceRequest = otlpTraceRequests.take(); - assertThat(traceRequest.getResourceSpans(0).getResource().getAttributesList()) - .contains( - KeyValue.newBuilder() - .setKey("service.name") - .setValue(AnyValue.newBuilder().setStringValue("test").build()) - .build(), - KeyValue.newBuilder() - .setKey("cat") - .setValue(AnyValue.newBuilder().setStringValue("meow").build()) - .build()); + List spanResourceAttributes = + traceRequest.getResourceSpans(0).getResource().getAttributesList(); + assertHasKeyValue(spanResourceAttributes, "service.name", "test"); + assertHasKeyValue(spanResourceAttributes, "cat", "meow"); io.opentelemetry.proto.trace.v1.Span span = traceRequest.getResourceSpans(0).getScopeSpans(0).getSpans(0); - // Dog dropped by attribute limit. - assertThat(span.getAttributesList()) - .containsExactlyInAnyOrder( - KeyValue.newBuilder() - .setKey("configured") - .setValue(AnyValue.newBuilder().setBoolValue(true).build()) - .build(), - KeyValue.newBuilder() - .setKey("wrapped") - .setValue(AnyValue.newBuilder().setIntValue(1).build()) - .build(), - KeyValue.newBuilder() - .setKey("cat") - .setValue(AnyValue.newBuilder().setStringValue("meow").build()) - .build()); + assertHasKeyValue(span.getAttributesList(), "configured", true); + assertHasKeyValue(span.getAttributesList(), "wrapped", 1); + assertHasKeyValue(span.getAttributesList(), "cat", "meow"); + assertHasKeyValue(span.getAttributesList(), "extra-key", "extra-value"); // await on assertions since metrics may come in different order for BatchSpanProcessor, - // exporter, or the ones we - // created in the test. + // exporter, or the ones we created in the test. 
await() .untilAsserted( () -> { ExportMetricsServiceRequest metricRequest = otlpMetricsRequests.take(); - assertThat(metricRequest.getResourceMetrics(0).getResource().getAttributesList()) - .contains( - KeyValue.newBuilder() - .setKey("service.name") - .setValue(AnyValue.newBuilder().setStringValue("test").build()) - .build(), - KeyValue.newBuilder() - .setKey("cat") - .setValue(AnyValue.newBuilder().setStringValue("meow").build()) - .build()); - for (ResourceMetrics resourceMetrics : metricRequest.getResourceMetricsList()) { - assertThat(resourceMetrics.getScopeMetricsList()) - .anySatisfy(ilm -> assertThat(ilm.getScope().getName()).isEqualTo("test")); - for (ScopeMetrics instrumentationLibraryMetrics : - resourceMetrics.getScopeMetricsList()) { - for (Metric metric : instrumentationLibraryMetrics.getMetricsList()) { - // SPI was loaded - // MetricExporterCustomizer filters metrics not named my-metric - assertThat(metric.getName()).isEqualTo("my-metric"); - // TestMeterProviderConfigurer configures a view that only passes on attribute - // named allowed - // configured-test - assertThat(getFirstDataPointLabels(metric)) - .contains( - KeyValue.newBuilder() - .setKey("allowed") - .setValue(AnyValue.newBuilder().setStringValue("bear").build()) - .build()); - } - } - } + assertThat(metricRequest.getResourceMetricsList()) + .satisfiesExactly( + resourceMetrics -> { + List metricResourceAttributes = + resourceMetrics.getResource().getAttributesList(); + assertHasKeyValue(metricResourceAttributes, "service.name", "test"); + assertHasKeyValue(metricResourceAttributes, "cat", "meow"); + assertThat(resourceMetrics.getScopeMetricsList()) + .anySatisfy( + scopeMetrics -> { + assertThat(scopeMetrics.getScope().getName()).isEqualTo("test"); + assertThat(scopeMetrics.getMetricsList()) + .satisfiesExactly( + metric -> { + // SPI was loaded + assertThat(metric.getName()).isEqualTo("my-metric"); + // TestMeterProviderConfigurer configures a view that + // only passes an attribute named "allowed" + // configured-test + assertHasKeyValue( + getFirstDataPointLabels(metric), "allowed", "bear"); + }); + }) + // This verifies that AutoConfigureListener was invoked and the OTLP + // span / log exporters received the autoconfigured OpenTelemetrySdk + // instance + .anySatisfy( + scopeMetrics -> { + assertThat(scopeMetrics.getScope().getName()) + .isEqualTo("io.opentelemetry.exporters.otlp-grpc"); + assertThat(scopeMetrics.getMetricsList()) + .satisfiesExactlyInAnyOrder( + metric -> + assertThat(metric.getName()) + .isEqualTo("otlp.exporter.seen"), + metric -> + assertThat(metric.getName()) + .isEqualTo("otlp.exporter.exported")); + }); + }); }); await().untilAsserted(() -> assertThat(otlpLogsRequests).hasSize(1)); ExportLogsServiceRequest logRequest = otlpLogsRequests.take(); - assertThat(logRequest.getResourceLogs(0).getResource().getAttributesList()) - .contains( - KeyValue.newBuilder() - .setKey("service.name") - .setValue(AnyValue.newBuilder().setStringValue("test").build()) - .build(), - KeyValue.newBuilder() - .setKey("cat") - .setValue(AnyValue.newBuilder().setStringValue("meow").build()) - .build()); + List logResourceAttributes = + logRequest.getResourceLogs(0).getResource().getAttributesList(); + assertHasKeyValue(logResourceAttributes, "service.name", "test"); + assertHasKeyValue(logResourceAttributes, "cat", "meow"); assertThat(logRequest.getResourceLogs(0).getScopeLogs(0).getLogRecordsList()) + // LogRecordCustomizer customizes BatchLogProcessor to add an extra attribute on every log + // record + 
.allSatisfy( + logRecord -> + assertHasKeyValue(logRecord.getAttributesList(), "extra-key", "extra-value")) .satisfiesExactlyInAnyOrder( logRecord -> { - // LogRecordExporterCustomizer filters logs not whose level is less than Severity.INFO + // LogRecordCustomizer filters logs not whose level is less than Severity.INFO assertThat(logRecord.getBody().getStringValue()).isEqualTo("info log message"); assertThat(logRecord.getSeverityNumberValue()) .isEqualTo(Severity.INFO.getSeverityNumber()); - }, - logRecord -> - assertThat(logRecord.getAttributesList()) - .containsExactlyInAnyOrder( - KeyValue.newBuilder() - .setKey("event.domain") - .setValue(AnyValue.newBuilder().setStringValue("test-domain").build()) - .build(), - KeyValue.newBuilder() - .setKey("event.name") - .setValue(AnyValue.newBuilder().setStringValue("test-name").build()) - .build(), - KeyValue.newBuilder() - .setKey("cow") - .setValue(AnyValue.newBuilder().setStringValue("moo").build()) - .build())); + }); + } + + private static void assertHasKeyValue(List keyValues, String key, boolean value) { + assertThat(keyValues) + .contains( + KeyValue.newBuilder() + .setKey(key) + .setValue(AnyValue.newBuilder().setBoolValue(value)) + .build()); + } + + private static void assertHasKeyValue(List keyValues, String key, long value) { + assertThat(keyValues) + .contains( + KeyValue.newBuilder() + .setKey(key) + .setValue(AnyValue.newBuilder().setIntValue(value)) + .build()); + } + + private static void assertHasKeyValue(List keyValues, String key, String value) { + assertThat(keyValues) + .contains( + KeyValue.newBuilder() + .setKey(key) + .setValue(AnyValue.newBuilder().setStringValue(value)) + .build()); } private static List getFirstDataPointLabels(Metric metric) { diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfigurationTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfigurationTest.java index 9764d7db8b3..9043a70cdc6 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/LogRecordExporterConfigurationTest.java @@ -11,6 +11,7 @@ import com.google.common.collect.ImmutableMap; import io.opentelemetry.exporter.logging.SystemOutLogRecordExporter; import io.opentelemetry.exporter.logging.otlp.OtlpJsonLoggingLogRecordExporter; +import io.opentelemetry.exporter.logging.otlp.internal.logs.OtlpStdoutLogRecordExporter; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; import io.opentelemetry.sdk.autoconfigure.internal.NamedSpiManager; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; @@ -31,11 +32,17 @@ void configureExporter_KnownSpiExportersOnClasspath() { LogRecordExporterConfiguration.logRecordExporterSpiManager( DefaultConfigProperties.createFromMap(Collections.emptyMap()), spiHelper); + assertThat(LogRecordExporterConfiguration.configureExporter("console", spiExportersManager)) + .isInstanceOf(SystemOutLogRecordExporter.class); assertThat(LogRecordExporterConfiguration.configureExporter("logging", spiExportersManager)) .isInstanceOf(SystemOutLogRecordExporter.class); assertThat( LogRecordExporterConfiguration.configureExporter("logging-otlp", spiExportersManager)) .isInstanceOf(OtlpJsonLoggingLogRecordExporter.class); + assertThat( + LogRecordExporterConfiguration.configureExporter( + 
"experimental-otlp/stdout", spiExportersManager)) + .isInstanceOf(OtlpStdoutLogRecordExporter.class); assertThat(LogRecordExporterConfiguration.configureExporter("otlp", spiExportersManager)) .isInstanceOf(OtlpGrpcLogRecordExporter.class); } diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfigurationTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfigurationTest.java index 0e1fc3e8a83..7a5473e8dea 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/LoggerProviderConfigurationTest.java @@ -7,7 +7,6 @@ import static org.assertj.core.api.Assertions.assertThat; -import com.google.common.collect.ImmutableMap; import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.exporter.logging.SystemOutLogRecordExporter; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; @@ -18,12 +17,14 @@ import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder; import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; import io.opentelemetry.sdk.trace.internal.JcTools; import java.io.Closeable; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Queue; @@ -47,6 +48,7 @@ void configureLoggerProvider() { SpiHelper.create(LoggerProviderConfiguration.class.getClassLoader()), MeterProvider.noop(), (a, unused) -> a, + (a, unused) -> a, closeables); cleanup.addCloseables(closeables); @@ -84,26 +86,31 @@ void configureLoggerProvider() { void configureLogRecordProcessors_multipleExportersWithLogging() { List closeables = new ArrayList<>(); + Map exportersByName = new LinkedHashMap<>(); + exportersByName.put("console", SystemOutLogRecordExporter.create()); + exportersByName.put("logging", SystemOutLogRecordExporter.create()); + exportersByName.put("otlp", OtlpGrpcLogRecordExporter.builder().build()); + List logRecordProcessors = LoggerProviderConfiguration.configureLogRecordProcessors( DefaultConfigProperties.createFromMap(Collections.emptyMap()), - ImmutableMap.of( - "logging", - SystemOutLogRecordExporter.create(), - "otlp", - OtlpGrpcLogRecordExporter.builder().build()), + exportersByName, MeterProvider.noop(), closeables); cleanup.addCloseables(closeables); assertThat(logRecordProcessors) - .hasSize(2) - .hasAtLeastOneElementOfType(SimpleLogRecordProcessor.class) - .hasAtLeastOneElementOfType(BatchLogRecordProcessor.class); + .hasSize(3) + .hasExactlyElementsOfTypes( + SimpleLogRecordProcessor.class, + SimpleLogRecordProcessor.class, + BatchLogRecordProcessor.class); assertThat(closeables) - .hasSize(2) - .hasAtLeastOneElementOfType(SimpleLogRecordProcessor.class) - .hasAtLeastOneElementOfType(BatchLogRecordProcessor.class); + .hasSize(3) + .hasExactlyElementsOfTypes( + SimpleLogRecordProcessor.class, + SimpleLogRecordProcessor.class, + BatchLogRecordProcessor.class); } @Test @@ -111,7 +118,7 @@ void configureBatchLogRecordProcessor() { Map properties = new HashMap<>(); properties.put("otel.blrp.schedule.delay", "100000"); properties.put("otel.blrp.max.queue.size", "2"); 
- properties.put("otel.blrp.max.export.batch.size", "3"); + properties.put("otel.blrp.max.export.batch.size", "2"); properties.put("otel.blrp.export.timeout", "4"); try (BatchLogRecordProcessor processor = @@ -129,7 +136,7 @@ void configureBatchLogRecordProcessor() { assertThat(worker) .extracting("exporterTimeoutNanos") .isEqualTo(TimeUnit.MILLISECONDS.toNanos(4)); - assertThat(worker).extracting("maxExportBatchSize").isEqualTo(3); + assertThat(worker).extracting("maxExportBatchSize").isEqualTo(2); assertThat(worker) .extracting("queue") .isInstanceOfSatisfying( diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfigurationTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfigurationTest.java index c5df15c73bb..da5ce587d27 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/MeterProviderConfigurationTest.java @@ -17,8 +17,8 @@ import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; +import io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector; import io.opentelemetry.sdk.metrics.export.MetricReader; -import io.opentelemetry.sdk.metrics.internal.export.CardinalityLimitSelector; import java.io.Closeable; import java.util.ArrayList; import java.util.Collections; @@ -46,14 +46,15 @@ void configureMeterProvider_InvalidCardinalityLimit() { ImmutableMap.of( "otel.metrics.exporter", "logging", - "otel.experimental.metrics.cardinality.limit", + "otel.java.metrics.cardinality.limit", "0")), spiHelper, (a, b) -> a, + (a, b) -> a, closeables); }) .isInstanceOf(ConfigurationException.class) - .hasMessage("otel.experimental.metrics.cardinality.limit must be >= 1"); + .hasMessage("otel.java.metrics.cardinality.limit must be >= 1"); cleanup.addCloseables(closeables); } @@ -69,12 +70,33 @@ void configureMeterProvider_ConfiguresCardinalityLimit() { Collections.singletonMap("otel.metrics.exporter", "logging")), spiHelper, (a, b) -> a, + (a, b) -> a, closeables); cleanup.addCloseables(closeables); assertCardinalityLimit(builder, 2000); // Customized limit cardinality limit to 100 builder = SdkMeterProvider.builder(); + MeterProviderConfiguration.configureMeterProvider( + builder, + DefaultConfigProperties.createFromMap( + ImmutableMap.of( + "otel.metrics.exporter", + "logging", + "otel.java.metrics.cardinality.limit", + "100", + // otel.java.metrics.cardinality.limit takes priority over deprecated property + "otel.experimental.metrics.cardinality.limit", + "200")), + spiHelper, + (a, b) -> a, + (a, b) -> a, + closeables); + cleanup.addCloseables(closeables); + assertCardinalityLimit(builder, 100); + + // Deprecated property + builder = SdkMeterProvider.builder(); MeterProviderConfiguration.configureMeterProvider( builder, DefaultConfigProperties.createFromMap( @@ -85,6 +107,7 @@ void configureMeterProvider_ConfiguresCardinalityLimit() { "100")), spiHelper, (a, b) -> a, + (a, b) -> a, closeables); cleanup.addCloseables(closeables); assertCardinalityLimit(builder, 100); diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/MetricExporterConfigurationTest.java 
b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/MetricExporterConfigurationTest.java index 205eb674254..c8bd0937e63 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/MetricExporterConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/MetricExporterConfigurationTest.java @@ -5,6 +5,7 @@ package io.opentelemetry.sdk.autoconfigure; +import static org.assertj.core.api.Assertions.as; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -21,11 +22,16 @@ import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; import io.opentelemetry.sdk.metrics.export.MetricExporter; import io.opentelemetry.sdk.metrics.export.MetricReader; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.prometheus.metrics.exporter.httpserver.HTTPServer; import java.io.Closeable; +import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.function.BiFunction; import java.util.stream.Stream; +import org.assertj.core.api.InstanceOfAssertFactories; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; @@ -34,8 +40,9 @@ class MetricExporterConfigurationTest { - private static final ConfigProperties EMPTY = - DefaultConfigProperties.createFromMap(Collections.emptyMap()); + private static final ConfigProperties CONFIG_PROPERTIES = + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.exporter.prometheus.port", "0")); @RegisterExtension CleanupExtension cleanup = new CleanupExtension(); @@ -48,19 +55,61 @@ void configureReader_PrometheusOnClasspath() { MetricReader reader = MetricExporterConfiguration.configureReader( - "prometheus", EMPTY, spiHelper, (a, b) -> a, closeables); + "prometheus", CONFIG_PROPERTIES, spiHelper, (a, b) -> a, (a, b) -> a, closeables); cleanup.addCloseables(closeables); assertThat(reader).isInstanceOf(PrometheusHttpServer.class); assertThat(closeables).hasSize(1); } + @Test + void configureReader_customizeReader_prometheus() { + List closeables = new ArrayList<>(); + + int port = FreePortFinder.getFreePort(); + BiFunction readerCustomizer = + (existingReader, config) -> PrometheusHttpServer.builder().setPort(port).build(); + MetricReader reader = + MetricExporterConfiguration.configureReader( + "prometheus", CONFIG_PROPERTIES, spiHelper, readerCustomizer, (a, b) -> a, closeables); + cleanup.addCloseables(closeables); + + assertThat(reader).isInstanceOf(PrometheusHttpServer.class); + assertThat(closeables).hasSize(2); + PrometheusHttpServer prometheusHttpServer = (PrometheusHttpServer) reader; + assertThat(prometheusHttpServer) + .extracting("httpServer", as(InstanceOfAssertFactories.type(HTTPServer.class))) + .satisfies(httpServer -> assertThat(httpServer.getPort()).isEqualTo(port)); + } + + @Test + void configureReader_customizeReader_otlp() { + List closeables = new ArrayList<>(); + + BiFunction readerCustomizer = + (existingReader, config) -> + PeriodicMetricReader.builder(OtlpGrpcMetricExporter.builder().build()) + .setInterval(Duration.ofSeconds(123)) + .build(); + MetricReader reader = + MetricExporterConfiguration.configureReader( + "otlp", CONFIG_PROPERTIES, spiHelper, readerCustomizer, (a, b) -> a, closeables); + cleanup.addCloseables(closeables); + + 
assertThat(reader).isInstanceOf(PeriodicMetricReader.class); + assertThat(closeables).hasSize(3); + PeriodicMetricReader periodicMetricReader = (PeriodicMetricReader) reader; + assertThat(periodicMetricReader) + .extracting("intervalNanos") + .isEqualTo(Duration.ofSeconds(123).toNanos()); + } + @ParameterizedTest @MethodSource("knownExporters") void configureExporter_KnownSpiExportersOnClasspath( String exporterName, Class expectedExporter) { NamedSpiManager spiExportersManager = - MetricExporterConfiguration.metricExporterSpiManager(EMPTY, spiHelper); + MetricExporterConfiguration.metricExporterSpiManager(CONFIG_PROPERTIES, spiHelper); MetricExporter metricExporter = MetricExporterConfiguration.configureExporter(exporterName, spiExportersManager); @@ -81,7 +130,7 @@ private static Stream knownExporters() { void configureMetricReader_KnownSpiExportersOnClasspath( String exporterName, Class expectedExporter) { NamedSpiManager spiMetricReadersManager = - MetricExporterConfiguration.metricReadersSpiManager(EMPTY, spiHelper); + MetricExporterConfiguration.metricReadersSpiManager(CONFIG_PROPERTIES, spiHelper); MetricReader metricReader = MetricExporterConfiguration.configureMetricReader(exporterName, spiMetricReadersManager); diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ResourceConfigurationTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ResourceConfigurationTest.java index 722d80b8704..ae2342aeff9 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ResourceConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/ResourceConfigurationTest.java @@ -9,31 +9,29 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.testing.assertj.AttributesAssert; import java.net.URL; import java.net.URLClassLoader; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.function.Consumer; +import java.util.stream.Stream; +import javax.annotation.Nullable; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +@SuppressLogger(ResourceConfiguration.class) class ResourceConfigurationTest { private final SpiHelper spiHelper = SpiHelper.create(ResourceConfigurationTest.class.getClassLoader()); - @Test - void configureResource() { - Attributes attributes = - ResourceConfiguration.configureResource( - DefaultConfigProperties.create(Collections.emptyMap()), spiHelper, (r, c) -> r) - .getAttributes(); - - assertThat(attributes.get(AttributeKey.stringKey("animal"))).isNotNull(); - assertThat(attributes.get(AttributeKey.stringKey("color"))).isNotNull(); - } - @Test void configureResource_EmptyClassLoader() { Attributes attributes = @@ -43,55 +41,126 @@ void configureResource_EmptyClassLoader() { (r, c) -> r) .getAttributes(); + assertThat(attributes.get(AttributeKey.stringKey("service.name"))) + .isEqualTo("unknown_service:java"); + assertThat(attributes.get(AttributeKey.stringKey("cat"))).isNull(); 
assertThat(attributes.get(AttributeKey.stringKey("animal"))).isNull(); assertThat(attributes.get(AttributeKey.stringKey("color"))).isNull(); } - @Test - void configureResource_OnlyEnabled() { - Map customConfigs = new HashMap<>(1); - customConfigs.put( - "otel.java.enabled.resource.providers", - "io.opentelemetry.sdk.autoconfigure.provider.TestAnimalResourceProvider"); + @ParameterizedTest + @MethodSource("configureResourceArgs") + void configureResource( + @Nullable String enabledProviders, + @Nullable String disabledProviders, + Consumer attributeAssertion) { + // build.gradle.kts sets: + // OTEL_SERVICE_NAME=test + // OTEL_RESOURCE_ATTRIBUTES=cat=meow + Map config = new HashMap<>(); + if (enabledProviders != null) { + config.put("otel.java.enabled.resource.providers", enabledProviders); + } + if (disabledProviders != null) { + config.put("otel.java.disabled.resource.providers", disabledProviders); + } Attributes attributes = ResourceConfiguration.configureResource( - DefaultConfigProperties.create(customConfigs), spiHelper, (r, c) -> r) + DefaultConfigProperties.create(config), spiHelper, (r, c) -> r) .getAttributes(); - assertThat(attributes.get(AttributeKey.stringKey("animal"))).isEqualTo("cat"); - assertThat(attributes.get(AttributeKey.stringKey("color"))).isNull(); + attributeAssertion.accept(assertThat(attributes)); } - @Test - void configureResource_EnabledAndDisabled() { - Map customConfigs = new HashMap<>(2); - customConfigs.put( - "otel.java.enabled.resource.providers", - "io.opentelemetry.sdk.autoconfigure.provider.TestAnimalResourceProvider"); - customConfigs.put( - "otel.java.disabled.resource.providers", - "io.opentelemetry.sdk.extension.resources.TestColorResourceProvider"); - Attributes attributes = - ResourceConfiguration.configureResource( - DefaultConfigProperties.create(customConfigs), spiHelper, (r, c) -> r) - .getAttributes(); - - assertThat(attributes.get(AttributeKey.stringKey("animal"))).isEqualTo("cat"); - assertThat(attributes.get(AttributeKey.stringKey("color"))).isNull(); + private static Stream configureResourceArgs() { + return Stream.of( + // default + Arguments.of( + null, + null, + attributeConsumer( + attr -> attr.containsEntry("service.name", "test").containsEntry("cat", "meow"))), + // only enabled + Arguments.of( + "io.opentelemetry.sdk.autoconfigure.provider.TestAnimalResourceProvider", + null, + attributeConsumer( + attr -> + attr.containsEntry("service.name", "unknown_service:java") + .doesNotContainKey("cat") + .containsEntry("animal", "cat") + .doesNotContainKey("color"))), + // only disabled + Arguments.of( + null, + "io.opentelemetry.sdk.autoconfigure.provider.TestColorResourceProvider", + attributeConsumer( + attr -> + attr.containsEntry("service.name", "test") + .containsEntry("cat", "meow") + .containsEntry("animal", "cat") + .doesNotContainKey("color"))), + // enabled and disabled + Arguments.of( + "io.opentelemetry.sdk.autoconfigure.provider.TestAnimalResourceProvider", + "io.opentelemetry.sdk.autoconfigure.provider.TestColorResourceProvider", + attributeConsumer( + attr -> + attr.containsEntry("service.name", "unknown_service:java") + .doesNotContainKey("cat") + .containsEntry("animal", "cat") + .doesNotContainKey("color"))), + Arguments.of( + "io.opentelemetry.sdk.autoconfigure.provider.TestAnimalResourceProvider", + "io.opentelemetry.sdk.autoconfigure.provider.TestColorResourceProvider,io.opentelemetry.sdk.autoconfigure.provider.TestAnimalResourceProvider", + attributeConsumer( + attr -> + attr.containsEntry("service.name", 
"unknown_service:java") + .doesNotContainKey("cat") + .doesNotContainKey("animal") + .doesNotContainKey("color"))), + // environment resource provider + Arguments.of( + "io.opentelemetry.sdk.autoconfigure.EnvironmentResourceProvider", + null, + attributeConsumer( + attr -> + attr.containsEntry("service.name", "test") + .containsEntry("cat", "meow") + .doesNotContainKey("animal") + .doesNotContainKey("color"))), + Arguments.of( + null, + "io.opentelemetry.sdk.autoconfigure.EnvironmentResourceProvider", + attributeConsumer( + attr -> + attr.containsEntry("service.name", "unknown_service:java") + .doesNotContainKey("cat") + .containsEntry("animal", "cat") + .containsEntry("color", "blue"))), + // old environment resource provider FQCN + Arguments.of( + "io.opentelemetry.sdk.autoconfigure.internal.EnvironmentResourceProvider", + null, + attributeConsumer( + attr -> + attr.containsEntry("service.name", "test") + .containsEntry("cat", "meow") + .doesNotContainKey("animal") + .doesNotContainKey("color"))), + Arguments.of( + null, + "io.opentelemetry.sdk.autoconfigure.internal.EnvironmentResourceProvider", + attributeConsumer( + attr -> + attr.containsEntry("service.name", "unknown_service:java") + .doesNotContainKey("cat") + .containsEntry("animal", "cat") + .containsEntry("color", "blue")))); } - @Test - void configureResource_OnlyDisabled() { - Map customConfigs = new HashMap<>(1); - customConfigs.put( - "otel.java.disabled.resource.providers", - "io.opentelemetry.sdk.autoconfigure.provider.TestColorResourceProvider"); - Attributes attributes = - ResourceConfiguration.configureResource( - DefaultConfigProperties.create(customConfigs), spiHelper, (r, c) -> r) - .getAttributes(); - - assertThat(attributes.get(AttributeKey.stringKey("animal"))).isEqualTo("cat"); - assertThat(attributes.get(AttributeKey.stringKey("color"))).isNull(); + private static Consumer attributeConsumer( + Consumer attributesAssertConsumer) { + return attributesAssertConsumer; } } diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/SpanExporterConfigurationTest.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/SpanExporterConfigurationTest.java index 86e6bf24354..64e4a55e91a 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/SpanExporterConfigurationTest.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/SpanExporterConfigurationTest.java @@ -28,14 +28,13 @@ class SpanExporterConfigurationTest { SpiHelper.create(SpanExporterConfigurationTest.class.getClassLoader()); @Test - @SuppressWarnings("deprecation") // Testing deprecated jaeger exporter void configureExporter_KnownSpiExportersOnClasspath() { NamedSpiManager spiExportersManager = SpanExporterConfiguration.spanExporterSpiManager( DefaultConfigProperties.createFromMap(Collections.emptyMap()), spiHelper); - assertThat(SpanExporterConfiguration.configureExporter("jaeger", spiExportersManager)) - .isInstanceOf(io.opentelemetry.exporter.jaeger.JaegerGrpcSpanExporter.class); + assertThat(SpanExporterConfiguration.configureExporter("console", spiExportersManager)) + .isInstanceOf(LoggingSpanExporter.class); assertThat(SpanExporterConfiguration.configureExporter("logging", spiExportersManager)) .isInstanceOf(LoggingSpanExporter.class); assertThat(SpanExporterConfiguration.configureExporter("logging-otlp", spiExportersManager)) diff --git 
a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/LogRecordCustomizer.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/LogRecordCustomizer.java new file mode 100644 index 00000000000..78c21104ff2 --- /dev/null +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/LogRecordCustomizer.java @@ -0,0 +1,78 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.autoconfigure.provider; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer; +import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.logs.LogRecordProcessor; +import io.opentelemetry.sdk.logs.ReadWriteLogRecord; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; +import java.util.Collection; +import java.util.stream.Collectors; + +/** Behavior asserted in {@link io.opentelemetry.sdk.autoconfigure.FullConfigTest}. */ +public class LogRecordCustomizer implements AutoConfigurationCustomizerProvider { + @Override + public void customize(AutoConfigurationCustomizer autoConfiguration) { + autoConfiguration.addLogRecordProcessorCustomizer(LogRecordCustomizer::processorCustomizer); + autoConfiguration.addLogRecordExporterCustomizer(LogRecordCustomizer::exporterCustomizer); + } + + private static LogRecordProcessor processorCustomizer( + LogRecordProcessor delegate, ConfigProperties config) { + return new LogRecordProcessor() { + @Override + public void onEmit(Context context, ReadWriteLogRecord logRecord) { + logRecord.setAttribute(AttributeKey.stringKey("extra-key"), "extra-value"); + delegate.onEmit(context, logRecord); + } + + @Override + public CompletableResultCode shutdown() { + return delegate.shutdown(); + } + + @Override + public CompletableResultCode forceFlush() { + return delegate.forceFlush(); + } + }; + } + + private static LogRecordExporter exporterCustomizer( + LogRecordExporter delegate, ConfigProperties config) { + return new LogRecordExporter() { + @Override + public CompletableResultCode export(Collection logs) { + Collection filtered = + logs.stream() + .filter( + log -> + log.getSeverity() == Severity.UNDEFINED_SEVERITY_NUMBER + || log.getSeverity().getSeverityNumber() + >= Severity.INFO.getSeverityNumber()) + .collect(Collectors.toList()); + return delegate.export(filtered); + } + + @Override + public CompletableResultCode flush() { + return delegate.flush(); + } + + @Override + public CompletableResultCode shutdown() { + return delegate.shutdown(); + } + }; + } +} diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/LogRecordExporterCustomizer.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/LogRecordExporterCustomizer.java deleted file mode 100644 index fe6c2930b7e..00000000000 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/LogRecordExporterCustomizer.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * 
SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.autoconfigure.provider; - -import io.opentelemetry.api.logs.Severity; -import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer; -import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider; -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.logs.data.LogRecordData; -import io.opentelemetry.sdk.logs.export.LogRecordExporter; -import java.util.Collection; -import java.util.stream.Collectors; - -public class LogRecordExporterCustomizer implements AutoConfigurationCustomizerProvider { - @Override - public void customize(AutoConfigurationCustomizer autoConfiguration) { - autoConfiguration.addLogRecordExporterCustomizer( - (delegate, config) -> - new LogRecordExporter() { - @Override - public CompletableResultCode export(Collection logs) { - Collection filtered = - logs.stream() - .filter( - log -> - log.getSeverity() == Severity.UNDEFINED_SEVERITY_NUMBER - || log.getSeverity().getSeverityNumber() - >= Severity.INFO.getSeverityNumber()) - .collect(Collectors.toList()); - return delegate.export(filtered); - } - - @Override - public CompletableResultCode flush() { - return delegate.flush(); - } - - @Override - public CompletableResultCode shutdown() { - return delegate.shutdown(); - } - }); - } -} diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/MetricCustomizer.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/MetricCustomizer.java index 979071f23d8..c65434cf908 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/MetricCustomizer.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/MetricCustomizer.java @@ -19,6 +19,7 @@ import java.util.Collection; import java.util.stream.Collectors; +/** Behavior asserted in {@link io.opentelemetry.sdk.autoconfigure.FullConfigTest}. */ public class MetricCustomizer implements AutoConfigurationCustomizerProvider { @Override public void customize(AutoConfigurationCustomizer autoConfiguration) { @@ -48,7 +49,13 @@ public CompletableResultCode export(Collection metrics) { // please configure the SdkMeterProvider with the appropriate view. 
Collection filtered = metrics.stream() - .filter(metricData -> metricData.getName().equals("my-metric")) + .filter( + metricData -> + metricData.getName().equals("my-metric") + || metricData + .getInstrumentationScopeInfo() + .getName() + .startsWith("io.opentelemetry.exporters")) .collect(Collectors.toList()); return delegate.export(filtered); } diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/SpanCustomizer.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/SpanCustomizer.java new file mode 100644 index 00000000000..5fa8c41b3ca --- /dev/null +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/SpanCustomizer.java @@ -0,0 +1,102 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.autoconfigure.provider; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer; +import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.trace.SpanProcessor; +import io.opentelemetry.sdk.trace.data.DelegatingSpanData; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.util.Collection; +import java.util.stream.Collectors; + +/** Behavior asserted in {@link io.opentelemetry.sdk.autoconfigure.FullConfigTest}. 
*/ +public class SpanCustomizer implements AutoConfigurationCustomizerProvider { + @Override + public void customize(AutoConfigurationCustomizer autoConfiguration) { + autoConfiguration.addSpanProcessorCustomizer(SpanCustomizer::processorCustomizer); + autoConfiguration.addSpanExporterCustomizer(SpanCustomizer::exporterCustomizer); + } + + private static SpanProcessor processorCustomizer( + SpanProcessor delegate, ConfigProperties config) { + return new SpanProcessor() { + @Override + public void onStart(Context parentContext, ReadWriteSpan span) { + span.setAttribute(AttributeKey.stringKey("extra-key"), "extra-value"); + if (delegate.isStartRequired()) { + delegate.onStart(parentContext, span); + } + } + + @Override + public boolean isStartRequired() { + return true; + } + + @Override + public void onEnd(ReadableSpan span) { + if (delegate.isEndRequired()) { + delegate.onEnd(span); + } + } + + @Override + public boolean isEndRequired() { + return delegate.isEndRequired(); + } + + @Override + public CompletableResultCode shutdown() { + return delegate.shutdown(); + } + + @Override + public CompletableResultCode forceFlush() { + return delegate.forceFlush(); + } + }; + } + + private static SpanExporter exporterCustomizer(SpanExporter delegate, ConfigProperties config) { + return new SpanExporter() { + @Override + public CompletableResultCode export(Collection spans) { + return delegate.export( + spans.stream() + .map( + span -> + new DelegatingSpanData(span) { + @Override + public Attributes getAttributes() { + return span.getAttributes().toBuilder() + .put("wrapped", config.getInt("otel.test.wrapped")) + .build(); + } + }) + .collect(Collectors.toList())); + } + + @Override + public CompletableResultCode flush() { + return delegate.flush(); + } + + @Override + public CompletableResultCode shutdown() { + return delegate.shutdown(); + } + }; + } +} diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/SpanExporterCustomizer.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/SpanExporterCustomizer.java deleted file mode 100644 index 50f8210ecde..00000000000 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/SpanExporterCustomizer.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.autoconfigure.provider; - -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer; -import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider; -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.trace.data.DelegatingSpanData; -import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.sdk.trace.export.SpanExporter; -import java.util.Collection; -import java.util.stream.Collectors; - -public class SpanExporterCustomizer implements AutoConfigurationCustomizerProvider { - @Override - public void customize(AutoConfigurationCustomizer autoConfiguration) { - autoConfiguration.addSpanExporterCustomizer( - (delegate, config) -> - new SpanExporter() { - @Override - public CompletableResultCode export(Collection spans) { - return delegate.export( - spans.stream() - .map( - span -> - new DelegatingSpanData(span) { - @Override - public Attributes getAttributes() { - return span.getAttributes().toBuilder() - .put("wrapped", 
config.getInt("otel.test.wrapped")) - .build(); - } - }) - .collect(Collectors.toList())); - } - - @Override - public CompletableResultCode flush() { - return delegate.flush(); - } - - @Override - public CompletableResultCode shutdown() { - return delegate.shutdown(); - } - }); - } -} diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableLogRecordExporterProvider.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableLogRecordExporterProvider.java index c1e0f37cadf..2b94eda22ff 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableLogRecordExporterProvider.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableLogRecordExporterProvider.java @@ -5,7 +5,9 @@ package io.opentelemetry.sdk.autoconfigure.provider; +import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener; import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.logs.data.LogRecordData; @@ -13,7 +15,7 @@ import java.util.Collection; public class TestConfigurableLogRecordExporterProvider - implements ConfigurableLogRecordExporterProvider { + implements ConfigurableLogRecordExporterProvider, AutoConfigureListener { @Override public LogRecordExporter createExporter(ConfigProperties config) { @@ -25,6 +27,9 @@ public String getName() { return "testExporter"; } + @Override + public void afterAutoConfigure(OpenTelemetrySdk sdk) {} + public static class TestLogRecordExporter implements LogRecordExporter { private final ConfigProperties config; diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableMetricExporterProvider.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableMetricExporterProvider.java index 59eaf065573..49ed7986477 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableMetricExporterProvider.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableMetricExporterProvider.java @@ -5,7 +5,9 @@ package io.opentelemetry.sdk.autoconfigure.provider; +import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener; import io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.metrics.InstrumentType; @@ -14,7 +16,8 @@ import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.util.Collection; -public class TestConfigurableMetricExporterProvider implements ConfigurableMetricExporterProvider { +public class TestConfigurableMetricExporterProvider + implements ConfigurableMetricExporterProvider, AutoConfigureListener { @Override public MetricExporter createExporter(ConfigProperties config) { @@ -26,6 +29,9 @@ public String getName() { return "testExporter"; } + @Override + public void 
afterAutoConfigure(OpenTelemetrySdk sdk) {} + public static class TestMetricExporter implements MetricExporter { private final ConfigProperties config; diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableSpanExporterProvider.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableSpanExporterProvider.java index 8f62bbddd23..a4c7fef26ab 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableSpanExporterProvider.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestConfigurableSpanExporterProvider.java @@ -5,14 +5,17 @@ package io.opentelemetry.sdk.autoconfigure.provider; +import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener; import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.export.SpanExporter; import java.util.Collection; -public class TestConfigurableSpanExporterProvider implements ConfigurableSpanExporterProvider { +public class TestConfigurableSpanExporterProvider + implements ConfigurableSpanExporterProvider, AutoConfigureListener { @Override public SpanExporter createExporter(ConfigProperties config) { return new TestSpanExporter(config); @@ -23,6 +26,9 @@ public String getName() { return "testExporter"; } + @Override + public void afterAutoConfigure(OpenTelemetrySdk sdk) {} + public static class TestSpanExporter implements SpanExporter { private final ConfigProperties config; diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestTracerProviderConfigurer.java b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestTracerProviderConfigurer.java index 3546b8f8160..31d5e8a2dd7 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestTracerProviderConfigurer.java +++ b/sdk-extensions/autoconfigure/src/testFullConfig/java/io/opentelemetry/sdk/autoconfigure/provider/TestTracerProviderConfigurer.java @@ -5,12 +5,9 @@ package io.opentelemetry.sdk.autoconfigure.provider; -import io.opentelemetry.context.Context; import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.trace.ReadWriteSpan; -import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.extension.incubator.trace.OnStartSpanProcessor; import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder; -import io.opentelemetry.sdk.trace.SpanProcessor; @SuppressWarnings("deprecation") // Support testing of SdkTracerProviderConfigurer public class TestTracerProviderConfigurer @@ -18,24 +15,8 @@ public class TestTracerProviderConfigurer @Override public void configure(SdkTracerProviderBuilder tracerProvider, ConfigProperties config) { tracerProvider.addSpanProcessor( - new SpanProcessor() { - @Override - public void onStart(Context parentContext, ReadWriteSpan span) { - span.setAttribute("configured", config.getBoolean("otel.test.configured")); - } - - @Override - public boolean isStartRequired() { - return true; - } - - @Override - public void onEnd(ReadableSpan 
span) {} - - @Override - public boolean isEndRequired() { - return false; - } - }); + OnStartSpanProcessor.create( + (ctx, span) -> + span.setAttribute("configured", config.getBoolean("otel.test.configured")))); } } diff --git a/sdk-extensions/autoconfigure/src/testFullConfig/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider b/sdk-extensions/autoconfigure/src/testFullConfig/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider index 3103a136000..38b64bf91d3 100644 --- a/sdk-extensions/autoconfigure/src/testFullConfig/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider +++ b/sdk-extensions/autoconfigure/src/testFullConfig/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider @@ -1,3 +1,3 @@ -io.opentelemetry.sdk.autoconfigure.provider.SpanExporterCustomizer +io.opentelemetry.sdk.autoconfigure.provider.SpanCustomizer io.opentelemetry.sdk.autoconfigure.provider.MetricCustomizer -io.opentelemetry.sdk.autoconfigure.provider.LogRecordExporterCustomizer \ No newline at end of file +io.opentelemetry.sdk.autoconfigure.provider.LogRecordCustomizer diff --git a/sdk-extensions/autoconfigure/src/testIncubating/java/io/opentelemetry/sdk/autoconfigure/DeclarativeConfigurationTest.java b/sdk-extensions/autoconfigure/src/testIncubating/java/io/opentelemetry/sdk/autoconfigure/DeclarativeConfigurationTest.java new file mode 100644 index 00000000000..22fe2c6ef9f --- /dev/null +++ b/sdk-extensions/autoconfigure/src/testIncubating/java/io/opentelemetry/sdk/autoconfigure/DeclarativeConfigurationTest.java @@ -0,0 +1,251 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.autoconfigure; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static java.util.Collections.singletonMap; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import io.github.netmikey.logunit.api.LogCapturer; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.incubator.config.ConfigProvider; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.api.incubator.config.GlobalConfigProvider; +import io.opentelemetry.api.incubator.config.InstrumentationConfigUtil; +import io.opentelemetry.exporter.logging.LoggingSpanExporter; +import io.opentelemetry.internal.testing.CleanupExtension; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.autoconfigure.internal.AutoConfigureUtil; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import java.io.IOException; +import 
java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collections; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.event.Level; + +class DeclarativeConfigurationTest { + + @RegisterExtension private static final CleanupExtension cleanup = new CleanupExtension(); + + @RegisterExtension + static final LogCapturer logCapturer = + LogCapturer.create() + .captureForLogger(AutoConfiguredOpenTelemetrySdkBuilder.class.getName(), Level.TRACE); + + @TempDir private Path tempDir; + private Path configFilePath; + + @BeforeEach + void setup() throws IOException { + String yaml = + "file_format: \"0.3\"\n" + + "resource:\n" + + " attributes:\n" + + " - name: service.name\n" + + " value: test\n" + + "tracer_provider:\n" + + " processors:\n" + + " - simple:\n" + + " exporter:\n" + + " console: {}\n" + + "instrumentation:\n" + + " general:\n" + + " http:\n" + + " client:\n" + + " request_captured_headers:\n" + + " - Content-Type\n" + + " - Accept\n" + + " java:\n" + + " example:\n" + + " key: value\n"; + configFilePath = tempDir.resolve("otel-config.yaml"); + Files.write(configFilePath, yaml.getBytes(StandardCharsets.UTF_8)); + GlobalOpenTelemetry.resetForTest(); + GlobalConfigProvider.resetForTest(); + } + + @Test + @SuppressLogger(AutoConfiguredOpenTelemetrySdkBuilder.class) + void configFile_fileNotFound() { + assertThatThrownBy( + () -> + AutoConfiguredOpenTelemetrySdk.builder() + .addPropertiesSupplier(() -> singletonMap("otel.config.file", "foo")) + .addPropertiesSupplier( + () -> singletonMap("otel.experimental.config.file", "foo")) + .addPropertiesSupplier(() -> singletonMap("otel.sdk.disabled", "true")) + .build()) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining("Configuration file not found"); + + assertThatCode( + () -> + AutoConfiguredOpenTelemetrySdk.builder() + .addPropertiesSupplier(() -> singletonMap("otel.experimental.config.file", "")) + .addPropertiesSupplier(() -> singletonMap("otel.sdk.disabled", "true")) + .build()) + .doesNotThrowAnyException(); + } + + @Test + void configFile_Valid() { + ConfigProperties config = + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.experimental.config.file", configFilePath.toString())); + OpenTelemetrySdk expectedSdk = + OpenTelemetrySdk.builder() + .setTracerProvider( + SdkTracerProvider.builder() + .setResource( + Resource.getDefault().toBuilder().put("service.name", "test").build()) + .addSpanProcessor(SimpleSpanProcessor.create(LoggingSpanExporter.create())) + .build()) + .build(); + cleanup.addCloseable(expectedSdk); + AutoConfiguredOpenTelemetrySdkBuilder builder = spy(AutoConfiguredOpenTelemetrySdk.builder()); + Thread thread = new Thread(); + doReturn(thread).when(builder).shutdownHook(any()); + + AutoConfiguredOpenTelemetrySdk autoConfiguredOpenTelemetrySdk = + builder.setConfig(config).build(); + cleanup.addCloseable(autoConfiguredOpenTelemetrySdk.getOpenTelemetrySdk()); + + Assertions.assertThat(autoConfiguredOpenTelemetrySdk.getOpenTelemetrySdk().toString()) + .isEqualTo(expectedSdk.toString()); + // AutoConfiguredOpenTelemetrySdk#getResource() is set to a dummy value when configuring from + // file + Assertions.assertThat(autoConfiguredOpenTelemetrySdk.getResource()) + .isEqualTo(Resource.getDefault()); + verify(builder, 
times(1)).shutdownHook(autoConfiguredOpenTelemetrySdk.getOpenTelemetrySdk()); + Assertions.assertThat(Runtime.getRuntime().removeShutdownHook(thread)).isTrue(); + logCapturer.assertContains("Autoconfiguring from configuration file: " + configFilePath); + } + + @Test + void configFile_NoShutdownHook() { + ConfigProperties config = + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.experimental.config.file", configFilePath.toString())); + AutoConfiguredOpenTelemetrySdkBuilder builder = spy(AutoConfiguredOpenTelemetrySdk.builder()); + + AutoConfiguredOpenTelemetrySdk autoConfiguredOpenTelemetrySdk = + builder.setConfig(config).disableShutdownHook().build(); + cleanup.addCloseable(autoConfiguredOpenTelemetrySdk.getOpenTelemetrySdk()); + + verify(builder, never()).shutdownHook(any()); + } + + @Test + void configFile_setResultAsGlobalFalse() { + GlobalOpenTelemetry.set(OpenTelemetry.noop()); + ConfigProperties config = + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.experimental.config.file", configFilePath.toString())); + + AutoConfiguredOpenTelemetrySdk autoConfiguredOpenTelemetrySdk = + AutoConfiguredOpenTelemetrySdk.builder().setConfig(config).build(); + OpenTelemetrySdk openTelemetrySdk = autoConfiguredOpenTelemetrySdk.getOpenTelemetrySdk(); + cleanup.addCloseable(openTelemetrySdk); + + Assertions.assertThat(GlobalOpenTelemetry.get()) + .extracting("delegate") + .isNotSameAs(openTelemetrySdk); + assertThat(GlobalConfigProvider.get()) + .isNotSameAs(autoConfiguredOpenTelemetrySdk.getConfigProvider()); + } + + @Test + void configFile_setResultAsGlobalTrue() { + ConfigProperties config = + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.experimental.config.file", configFilePath.toString())); + + AutoConfiguredOpenTelemetrySdk autoConfiguredOpenTelemetrySdk = + AutoConfiguredOpenTelemetrySdk.builder().setConfig(config).setResultAsGlobal().build(); + OpenTelemetrySdk openTelemetrySdk = autoConfiguredOpenTelemetrySdk.getOpenTelemetrySdk(); + cleanup.addCloseable(openTelemetrySdk); + + Assertions.assertThat(GlobalOpenTelemetry.get()) + .extracting("delegate") + .isSameAs(openTelemetrySdk); + assertThat(GlobalConfigProvider.get()) + .isSameAs(autoConfiguredOpenTelemetrySdk.getConfigProvider()); + } + + @Test + void configFile_Error(@TempDir Path tempDir) throws IOException { + String yaml = + "file_format: \"0.3\"\n" + + "resource:\n" + + " attributes:\n" + + " - name: service.name\n" + + " value: test\n" + + "tracer_provider:\n" + + " processors:\n" + + " - simple:\n" + + " exporter:\n" + + " foo: {}\n"; + Path path = tempDir.resolve("otel-config.yaml"); + Files.write(path, yaml.getBytes(StandardCharsets.UTF_8)); + ConfigProperties config = + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.experimental.config.file", path.toString())); + + assertThatThrownBy(() -> AutoConfiguredOpenTelemetrySdk.builder().setConfig(config).build()) + .isInstanceOf(ConfigurationException.class) + .hasMessage( + "No component provider detected for io.opentelemetry.sdk.trace.export.SpanExporter with name \"foo\"."); + } + + @Test + void configFile_ConfigProvider() { + ConfigProperties config = + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.experimental.config.file", configFilePath.toString())); + + AutoConfiguredOpenTelemetrySdk autoConfiguredOpenTelemetrySdk = + AutoConfiguredOpenTelemetrySdk.builder().setConfig(config).setResultAsGlobal().build(); + OpenTelemetrySdk openTelemetrySdk = 
autoConfiguredOpenTelemetrySdk.getOpenTelemetrySdk(); + cleanup.addCloseable(openTelemetrySdk); + + // getConfig() should return ExtendedConfigProperties generic representation of the config file + ConfigProvider globalConfigProvider = GlobalConfigProvider.get(); + assertThat(globalConfigProvider) + .isNotNull() + .isSameAs(AutoConfigureUtil.getConfigProvider(autoConfiguredOpenTelemetrySdk)); + DeclarativeConfigProperties instrumentationConfig = + globalConfigProvider.getInstrumentationConfig(); + assertThat(instrumentationConfig).isNotNull(); + + // Extract instrumentation config from ConfigProvider + assertThat(InstrumentationConfigUtil.httpClientRequestCapturedHeaders(globalConfigProvider)) + .isEqualTo(Arrays.asList("Content-Type", "Accept")); + assertThat(InstrumentationConfigUtil.javaInstrumentationConfig(globalConfigProvider, "example")) + .isNotNull() + .satisfies(exampleConfig -> assertThat(exampleConfig.getString("key")).isEqualTo("value")); + } +} diff --git a/sdk-extensions/incubator/README.md b/sdk-extensions/incubator/README.md index 64cc8a7d932..1938182c9e3 100644 --- a/sdk-extensions/incubator/README.md +++ b/sdk-extensions/incubator/README.md @@ -2,6 +2,35 @@ This artifact contains experimental code related to the trace and metric SDKs. +## Declarative Configuration + +The [declarative configuration interface](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/README.md#declarative-configuration) allows for YAML-based file configuration of `OpenTelemetrySdk`. + +Usage: + +```java +File yamlConfigFile = new File("/path/to/config.yaml"); +OpenTelemetrySdk openTelemetrySdk; +try (FileInputStream yamlConfigFileInputStream = new FileInputStream(yamlConfigFile)) { + openTelemetrySdk = FileConfiguration.parseAndCreate(yamlConfigFileInputStream); +} +// ...proceed with application after successful initialization of OpenTelemetrySdk +``` + +Notes: +* Environment variable substitution is supported as [defined in the spec](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/data-model.md#environment-variable-substitution). +* Currently, there is no support for the customization (e.g. `AutoConfigurationCustomizerProvider`) SPIs defined in [opentelemetry-sdk-extension-autoconfigure-spi](../autoconfigure-spi). +* Custom SDK extension components that implement the [ComponentProvider](https://github.com/open-telemetry/opentelemetry-java/blob/main/sdk-extensions/autoconfigure-spi/src/main/java/io/opentelemetry/sdk/autoconfigure/spi/internal/ComponentProvider.java) SPI can be referenced in declarative configuration (an illustrative sketch follows after this README excerpt). Supported types include: + * `Resource` + * `SpanExporter` + * `MetricExporter` + * `LogRecordExporter` + * `SpanProcessor` + * `LogRecordProcessor` + * `TextMapPropagator` + * `Sampler` +* You can use declarative configuration with [autoconfigure](https://opentelemetry.io/docs/languages/java/configuration/#declarative-configuration) to specify a configuration file via environment variable, e.g. `OTEL_EXPERIMENTAL_CONFIG_FILE=/path/to/config.yaml`. + ## View File Configuration Adds support for file based YAML configuration of Metric SDK Views.
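The `ComponentProvider` note in the Declarative Configuration section above can be made concrete with a small sketch. The class below is purely illustrative: the class name, the `my-otlp` component name, and the `endpoint` property are assumptions, and the exact `ComponentProvider` method signatures should be verified against the interface in `opentelemetry-sdk-extension-autoconfigure-spi`. It shows roughly how a custom `SpanExporter` could be made referenceable by name from a declarative configuration file, assuming registration through the usual `META-INF/services` mechanism.

```java
package com.example.telemetry; // hypothetical package

import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporterBuilder;
import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider;
import io.opentelemetry.sdk.trace.export.SpanExporter;

/**
 * Hypothetical provider, registered via
 * META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider,
 * that lets a declarative config file reference a custom span exporter by the name "my-otlp".
 */
public class MyOtlpSpanExporterComponentProvider implements ComponentProvider<SpanExporter> {

  @Override
  public Class<SpanExporter> getType() {
    // The SDK extension point this provider supplies.
    return SpanExporter.class;
  }

  @Override
  public String getName() {
    // The key used to reference this component from the configuration file.
    return "my-otlp";
  }

  @Override
  public SpanExporter create(DeclarativeConfigProperties config) {
    // Properties nested under the component's YAML node are available here.
    OtlpGrpcSpanExporterBuilder builder = OtlpGrpcSpanExporter.builder();
    String endpoint = config.getString("endpoint");
    if (endpoint != null) {
      builder.setEndpoint(endpoint);
    }
    return builder.build();
  }
}
```

If the signatures above match the current SPI, such a component could then be referenced from YAML under a span processor, for example as `exporter: { my-otlp: { endpoint: http://collector:4317 } }`.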
diff --git a/sdk-extensions/incubator/build.gradle.kts b/sdk-extensions/incubator/build.gradle.kts index 35a58968fa8..7b89d8e94ec 100644 --- a/sdk-extensions/incubator/build.gradle.kts +++ b/sdk-extensions/incubator/build.gradle.kts @@ -25,7 +25,9 @@ dependencies { implementation("org.snakeyaml:snakeyaml-engine") // io.opentelemetry.sdk.extension.incubator.fileconfig + api(project(":api:incubator")) implementation("com.fasterxml.jackson.core:jackson-databind") + api("com.fasterxml.jackson.core:jackson-annotations") implementation("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml") implementation(project(":sdk-extensions:autoconfigure")) @@ -37,7 +39,6 @@ dependencies { testImplementation(project(":exporters:zipkin")) testImplementation(project(":sdk-extensions:jaeger-remote-sampler")) testImplementation(project(":extensions:trace-propagators")) - // As a part of the tests we check that we can parse examples without error. The https://github.com/open-telemetry/opentelemetry-configuration/blob/main/examples/kitchen-sink.yam contains a reference to the xray propagator testImplementation("io.opentelemetry.contrib:opentelemetry-aws-xray-propagator") testImplementation("com.linecorp.armeria:armeria-junit5") @@ -48,14 +49,18 @@ dependencies { // The sequence of tasks is: // 1. downloadConfigurationSchema - download configuration schema from open-telemetry/opentelemetry-configuration // 2. unzipConfigurationSchema - unzip the configuration schema archive contents to $buildDir/configuration/ -// 3. generateJsonSchema2Pojo - generate java POJOs from the configuration schema -// 4. jsonSchema2PojoPostProcessing - perform various post processing on the generated POJOs, e.g. replace javax.annotation.processing.Generated with javax.annotation.Generated, add @SuppressWarning("rawtypes") annotation -// 5. overwriteJs2p - overwrite original generated classes with versions containing updated @Generated annotation -// 6. deleteJs2pTmp - delete tmp directory +// 3. deleteTypeDescriptions - delete type_descriptions.yaml $buildDir/configuration/schema, which is not part of core schema and causes problems resolving type refs +// 4. generateJsonSchema2Pojo - generate java POJOs from the configuration schema +// 5. jsonSchema2PojoPostProcessing - perform various post processing on the generated POJOs, e.g. replace javax.annotation.processing.Generated with javax.annotation.Generated, add @SuppressWarning("rawtypes") annotation +// 6. overwriteJs2p - overwrite original generated classes with versions containing updated @Generated annotation +// 7. deleteJs2pTmp - delete tmp directory // ... 
proceed with normal sourcesJar, compileJava, etc -// TODO(jack-berg): update ref to be released version when available -val configurationRef = "0eb96de17c6533f668163873d95bd026bce1d8fb" +// TODO (trask) revert after the 0.4.0 release +// it was needed after 0.3.0 release because file_format in the examples weren't updated prior to the release tag +// val configurationTag = "0.3.0" +// val configurationRef = "refs/tags/v$configurationTag" // Replace with commit SHA to point to experiment with a specific commit +val configurationRef = "cea3905ce0a542d573968c3c47d413143d473cf4" val configurationRepoZip = "https://github.com/open-telemetry/opentelemetry-configuration/archive/$configurationRef.zip" val buildDirectory = layout.buildDirectory.asFile.get() @@ -77,6 +82,11 @@ val unzipConfigurationSchema by tasks.registering(Copy::class) { into("$buildDirectory/configuration/") } +val deleteTypeDescriptions by tasks.registering(Delete::class) { + dependsOn(unzipConfigurationSchema) + delete("$buildDirectory/configuration/schema/type_descriptions.yaml") +} + jsonSchema2Pojo { sourceFiles = setOf(file("$buildDirectory/configuration/schema")) targetDirectory = file("$buildDirectory/generated/sources/js2p/java/main") @@ -99,10 +109,13 @@ jsonSchema2Pojo { // Force java 9+ @Generated annotation, since java 8 @Generated annotation isn't detected by // jsonSchema2Pojo and annotation is skipped altogether targetVersion = "1.9" + + // Append Model as suffix to the generated classes. + classNameSuffix = "Model" } val generateJsonSchema2Pojo = tasks.getByName("generateJsonSchema2Pojo") -generateJsonSchema2Pojo.dependsOn(unzipConfigurationSchema) +generateJsonSchema2Pojo.dependsOn(deleteTypeDescriptions) val jsonSchema2PojoPostProcessing by tasks.registering(Copy::class) { dependsOn(generateJsonSchema2Pojo) diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AggregationFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AggregationFactory.java index 726177d65a6..b856d0da4a5 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AggregationFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AggregationFactory.java @@ -5,17 +5,16 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Aggregation; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Base2ExponentialBucketHistogram; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExplicitBucketHistogram; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AggregationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Base2ExponentialBucketHistogramModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExplicitBucketHistogramModel; +import io.opentelemetry.sdk.metrics.Aggregation; import java.io.Closeable; import java.util.List; -import javax.annotation.Nullable; -final class AggregationFactory - implements Factory { +final class AggregationFactory implements Factory { private static final AggregationFactory INSTANCE = new AggregationFactory(); 
@@ -26,22 +25,18 @@ static AggregationFactory getInstance() { } @Override - public io.opentelemetry.sdk.metrics.Aggregation create( - @Nullable Aggregation model, SpiHelper spiHelper, List closeables) { - if (model == null) { - return io.opentelemetry.sdk.metrics.Aggregation.defaultAggregation(); - } - + public Aggregation create( + AggregationModel model, SpiHelper spiHelper, List closeables) { if (model.getDrop() != null) { - return io.opentelemetry.sdk.metrics.Aggregation.drop(); + return Aggregation.drop(); } if (model.getSum() != null) { - return io.opentelemetry.sdk.metrics.Aggregation.sum(); + return Aggregation.sum(); } if (model.getLastValue() != null) { - return io.opentelemetry.sdk.metrics.Aggregation.lastValue(); + return Aggregation.lastValue(); } - Base2ExponentialBucketHistogram exponentialBucketHistogram = + Base2ExponentialBucketHistogramModel exponentialBucketHistogram = model.getBase2ExponentialBucketHistogram(); if (exponentialBucketHistogram != null) { Integer maxScale = exponentialBucketHistogram.getMaxScale(); @@ -53,25 +48,24 @@ public io.opentelemetry.sdk.metrics.Aggregation create( maxSize = 160; } try { - return io.opentelemetry.sdk.metrics.Aggregation.base2ExponentialBucketHistogram( - maxSize, maxScale); + return Aggregation.base2ExponentialBucketHistogram(maxSize, maxScale); } catch (IllegalArgumentException e) { - throw new ConfigurationException("Invalid exponential bucket histogram", e); + throw new DeclarativeConfigException("Invalid exponential bucket histogram", e); } } - ExplicitBucketHistogram explicitBucketHistogram = model.getExplicitBucketHistogram(); + ExplicitBucketHistogramModel explicitBucketHistogram = model.getExplicitBucketHistogram(); if (explicitBucketHistogram != null) { List boundaries = explicitBucketHistogram.getBoundaries(); if (boundaries == null) { - return io.opentelemetry.sdk.metrics.Aggregation.explicitBucketHistogram(); + return Aggregation.explicitBucketHistogram(); } try { - return io.opentelemetry.sdk.metrics.Aggregation.explicitBucketHistogram(boundaries); + return Aggregation.explicitBucketHistogram(boundaries); } catch (IllegalArgumentException e) { - throw new ConfigurationException("Invalid explicit bucket histogram", e); + throw new DeclarativeConfigException("Invalid explicit bucket histogram", e); } } - return io.opentelemetry.sdk.metrics.Aggregation.defaultAggregation(); + return Aggregation.defaultAggregation(); } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributeListFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributeListFactory.java new file mode 100644 index 00000000000..c79f1503e35 --- /dev/null +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributeListFactory.java @@ -0,0 +1,140 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import static java.util.stream.Collectors.toList; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeNameValueModel; +import java.io.Closeable; +import java.util.List; +import 
javax.annotation.Nullable; + +final class AttributeListFactory implements Factory, Attributes> { + + private static final AttributeListFactory INSTANCE = new AttributeListFactory(); + + private AttributeListFactory() {} + + static AttributeListFactory getInstance() { + return INSTANCE; + } + + @Override + public Attributes create( + List model, SpiHelper spiHelper, List closeables) { + AttributesBuilder builder = Attributes.builder(); + + for (AttributeNameValueModel nameValueModel : model) { + addToBuilder(nameValueModel, builder); + } + + return builder.build(); + } + + private static void addToBuilder( + AttributeNameValueModel nameValueModel, AttributesBuilder builder) { + String name = FileConfigUtil.requireNonNull(nameValueModel.getName(), "attribute name"); + Object value = FileConfigUtil.requireNonNull(nameValueModel.getValue(), "attribute value"); + AttributeNameValueModel.Type type = nameValueModel.getType(); + if (type == null) { + type = AttributeNameValueModel.Type.STRING; + } + switch (type) { + case STRING: + if (value instanceof String) { + builder.put(name, (String) value); + return; + } + break; + case BOOL: + if (value instanceof Boolean) { + builder.put(name, (boolean) value); + return; + } + break; + case INT: + if ((value instanceof Integer) || (value instanceof Long)) { + builder.put(name, ((Number) value).longValue()); + return; + } + break; + case DOUBLE: + if (value instanceof Number) { + builder.put(name, ((Number) value).doubleValue()); + return; + } + break; + case STRING_ARRAY: + List stringList = checkListOfType(value, String.class); + if (stringList != null) { + builder.put(AttributeKey.stringArrayKey(name), stringList); + return; + } + break; + case BOOL_ARRAY: + List boolList = checkListOfType(value, Boolean.class); + if (boolList != null) { + builder.put(AttributeKey.booleanArrayKey(name), boolList); + return; + } + break; + case INT_ARRAY: + List longList = checkListOfType(value, Long.class); + if (longList != null) { + builder.put(AttributeKey.longArrayKey(name), longList); + return; + } + List intList = checkListOfType(value, Integer.class); + if (intList != null) { + builder.put( + AttributeKey.longArrayKey(name), + intList.stream().map(i -> (long) i).collect(toList())); + return; + } + break; + case DOUBLE_ARRAY: + List doubleList = checkListOfType(value, Double.class); + if (doubleList != null) { + builder.put(AttributeKey.doubleArrayKey(name), doubleList); + return; + } + List floatList = checkListOfType(value, Float.class); + if (floatList != null) { + builder.put( + AttributeKey.doubleArrayKey(name), + floatList.stream().map(i -> (double) i).collect(toList())); + return; + } + break; + } + throw new DeclarativeConfigException( + "Error processing attribute with name \"" + + name + + "\": value did not match type " + + type.name()); + } + + @SuppressWarnings("unchecked") + @Nullable + private static List checkListOfType(Object value, Class expectedType) { + if (!(value instanceof List)) { + return null; + } + List list = (List) value; + if (list.isEmpty()) { + return null; + } + if (!list.stream().allMatch(entry -> expectedType.isAssignableFrom(entry.getClass()))) { + return null; + } + return (List) value; + } +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributesFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributesFactory.java deleted file mode 100644 index 22a9273cd8f..00000000000 --- 
a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributesFactory.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.extension.incubator.fileconfig; - -import static io.opentelemetry.api.common.AttributeKey.stringKey; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Attributes; -import java.io.Closeable; -import java.util.List; -import javax.annotation.Nullable; - -final class AttributesFactory - implements Factory { - - private static final AttributesFactory INSTANCE = new AttributesFactory(); - - private AttributesFactory() {} - - static AttributesFactory getInstance() { - return INSTANCE; - } - - @Override - public io.opentelemetry.api.common.Attributes create( - @Nullable Attributes model, SpiHelper spiHelper, List closeables) { - if (model == null) { - return io.opentelemetry.api.common.Attributes.empty(); - } - - AttributesBuilder builder = io.opentelemetry.api.common.Attributes.builder(); - - String serviceName = model.getServiceName(); - if (serviceName != null) { - builder.put(stringKey("service.name"), serviceName); - } - - model - .getAdditionalProperties() - .forEach( - (key, value) -> { - if (value == null) { - throw new ConfigurationException( - "Error processing attribute with key \"" + key + "\": unexpected null value"); - } - if (value instanceof String) { - builder.put(key, (String) value); - return; - } - if (value instanceof Integer) { - builder.put(key, (int) value); - return; - } - if (value instanceof Long) { - builder.put(key, (long) value); - return; - } - if (value instanceof Double) { - builder.put(key, (double) value); - return; - } - if (value instanceof Float) { - builder.put(key, (float) value); - return; - } - if (value instanceof Boolean) { - builder.put(key, (boolean) value); - return; - } - if (value instanceof List) { - List values = (List) value; - if (values.isEmpty()) { - return; - } - Object first = values.get(0); - if (first instanceof String) { - checkAllEntriesOfType(key, values, String.class); - builder.put( - AttributeKey.stringArrayKey(key), - values.stream().map(obj -> (String) obj).toArray(String[]::new)); - return; - } - if (first instanceof Long) { - checkAllEntriesOfType(key, values, Long.class); - builder.put( - AttributeKey.longArrayKey(key), - values.stream().map(obj -> (long) obj).toArray(Long[]::new)); - return; - } - if (first instanceof Integer) { - checkAllEntriesOfType(key, values, Integer.class); - builder.put( - AttributeKey.longArrayKey(key), - values.stream().map(obj -> Long.valueOf((int) obj)).toArray(Long[]::new)); - return; - } - if (first instanceof Double) { - checkAllEntriesOfType(key, values, Double.class); - builder.put( - AttributeKey.doubleArrayKey(key), - values.stream().map(obj -> (double) obj).toArray(Double[]::new)); - return; - } - if (first instanceof Float) { - checkAllEntriesOfType(key, values, Float.class); - builder.put( - AttributeKey.doubleArrayKey(key), - values.stream() - .map(obj -> Double.valueOf((float) obj)) - .toArray(Double[]::new)); - return; - } - if (first instanceof Boolean) { - checkAllEntriesOfType(key, values, Boolean.class); - builder.put( - AttributeKey.booleanArrayKey(key), - 
values.stream().map(obj -> (Boolean) obj).toArray(Boolean[]::new)); - return; - } - } - throw new ConfigurationException( - "Error processing attribute with key \"" - + key - + "\": unrecognized value type " - + value.getClass().getName()); - }); - - return builder.build(); - } - - private static void checkAllEntriesOfType(String key, List values, Class expectedType) { - values.forEach( - value -> { - if (value == null) { - throw new ConfigurationException( - "Error processing attribute with key \"" - + key - + "\": unexpected null element in value"); - } - if (!expectedType.isAssignableFrom(value.getClass())) { - throw new ConfigurationException( - "Error processing attribute with key \"" - + key - + "\": expected value entries to be of type " - + expectedType - + " but found entry with type " - + value.getClass()); - } - }); - } -} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationFactory.java deleted file mode 100644 index 1e0dddf3074..00000000000 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationFactory.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.extension.incubator.fileconfig; - -import io.opentelemetry.sdk.OpenTelemetrySdk; -import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfiguration; -import java.io.Closeable; -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Logger; - -/** - * Parses YAML configuration files conforming to the schema in open-telemetry/opentelemetry-configuration - * to a {@link OpenTelemetryConfiguration} in-memory representation. Interprets the in-memory - * representation to produce an {@link OpenTelemetrySdk}. - * - * @see #parseAndInterpret(InputStream) - */ -public final class ConfigurationFactory { - - private static final Logger logger = Logger.getLogger(ConfigurationFactory.class.getName()); - - private ConfigurationFactory() {} - - /** - * Parse the {@code inputStream} YAML to {@link OpenTelemetryConfiguration} and interpret the - * model to create {@link OpenTelemetrySdk} instance corresponding to the configuration. - * - * @param inputStream the configuration YAML - * @return the {@link OpenTelemetrySdk} - */ - public static OpenTelemetrySdk parseAndInterpret(InputStream inputStream) { - OpenTelemetryConfiguration model; - try { - model = ConfigurationReader.parse(inputStream); - } catch (RuntimeException e) { - throw new ConfigurationException("Unable to parse inputStream", e); - } - - List closeables = new ArrayList<>(); - try { - return OpenTelemetryConfigurationFactory.getInstance() - .create(model, SpiHelper.create(ConfigurationFactory.class.getClassLoader()), closeables); - } catch (RuntimeException e) { - logger.info( - "Error encountered interpreting configuration. 
Closing partially configured components."); - for (Closeable closeable : closeables) { - try { - logger.fine("Closing " + closeable.getClass().getName()); - closeable.close(); - } catch (IOException ex) { - logger.warning( - "Error closing " + closeable.getClass().getName() + ": " + ex.getMessage()); - } - } - if (e instanceof ConfigurationException) { - throw e; - } - throw new ConfigurationException("Unexpected configuration error", e); - } - } -} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationReader.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationReader.java deleted file mode 100644 index 04aaaa894ef..00000000000 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationReader.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.extension.incubator.fileconfig; - -import com.fasterxml.jackson.databind.ObjectMapper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfiguration; -import java.io.InputStream; -import org.snakeyaml.engine.v2.api.Load; -import org.snakeyaml.engine.v2.api.LoadSettings; - -final class ConfigurationReader { - - private static final ObjectMapper MAPPER = new ObjectMapper(); - - private ConfigurationReader() {} - - /** Parse the {@code configuration} YAML and return the {@link OpenTelemetryConfiguration}. */ - static OpenTelemetryConfiguration parse(InputStream configuration) { - LoadSettings settings = LoadSettings.builder().build(); - Load yaml = new Load(settings); - Object yamlObj = yaml.loadFromInputStream(configuration); - return MAPPER.convertValue(yamlObj, OpenTelemetryConfiguration.class); - } -} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfiguration.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfiguration.java new file mode 100644 index 00000000000..f19c85116ac --- /dev/null +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfiguration.java @@ -0,0 +1,352 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import com.fasterxml.jackson.annotation.JsonSetter; +import com.fasterxml.jackson.annotation.Nulls; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.autoconfigure.internal.ComponentLoader; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SamplerModel; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import 
java.util.Map; +import java.util.logging.Logger; +import java.util.regex.MatchResult; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.snakeyaml.engine.v2.api.Load; +import org.snakeyaml.engine.v2.api.LoadSettings; +import org.snakeyaml.engine.v2.common.ScalarStyle; +import org.snakeyaml.engine.v2.constructor.StandardConstructor; +import org.snakeyaml.engine.v2.exceptions.ConstructorException; +import org.snakeyaml.engine.v2.exceptions.YamlEngineException; +import org.snakeyaml.engine.v2.nodes.MappingNode; +import org.snakeyaml.engine.v2.nodes.Node; +import org.snakeyaml.engine.v2.nodes.NodeTuple; +import org.snakeyaml.engine.v2.nodes.ScalarNode; +import org.snakeyaml.engine.v2.schema.CoreSchema; + +/** + * Configure {@link OpenTelemetrySdk} using declarative + * configuration. For most users, this means calling {@link #parseAndCreate(InputStream)} with a + * YAML + * configuration file. + */ +public final class DeclarativeConfiguration { + + private static final Logger logger = Logger.getLogger(DeclarativeConfiguration.class.getName()); + private static final Pattern ENV_VARIABLE_REFERENCE = + Pattern.compile("\\$\\{([a-zA-Z_][a-zA-Z0-9_]*)(:-([^\n}]*))?}"); + private static final ComponentLoader DEFAULT_COMPONENT_LOADER = + SpiHelper.serviceComponentLoader(DeclarativeConfiguration.class.getClassLoader()); + + private static final ObjectMapper MAPPER; + + static { + MAPPER = + new ObjectMapper() + // Create empty object instances for keys which are present but have null values + .setDefaultSetterInfo(JsonSetter.Value.forValueNulls(Nulls.AS_EMPTY)); + // Boxed primitives which are present but have null values should be set to null, rather than + // empty instances + MAPPER.configOverride(String.class).setSetterInfo(JsonSetter.Value.forValueNulls(Nulls.SET)); + MAPPER.configOverride(Integer.class).setSetterInfo(JsonSetter.Value.forValueNulls(Nulls.SET)); + MAPPER.configOverride(Double.class).setSetterInfo(JsonSetter.Value.forValueNulls(Nulls.SET)); + MAPPER.configOverride(Boolean.class).setSetterInfo(JsonSetter.Value.forValueNulls(Nulls.SET)); + } + + private DeclarativeConfiguration() {} + + /** + * Combines {@link #parse(InputStream)} and {@link #create(OpenTelemetryConfigurationModel)}. + * + * @throws DeclarativeConfigException if unable to parse or interpret + */ + public static OpenTelemetrySdk parseAndCreate(InputStream inputStream) { + OpenTelemetryConfigurationModel configurationModel = parse(inputStream); + return create(configurationModel); + } + + /** + * Interpret the {@code configurationModel} to create {@link OpenTelemetrySdk} instance + * corresponding to the configuration. + * + * @param configurationModel the configuration model + * @return the {@link OpenTelemetrySdk} + * @throws DeclarativeConfigException if unable to interpret + */ + public static OpenTelemetrySdk create(OpenTelemetryConfigurationModel configurationModel) { + return create(configurationModel, DEFAULT_COMPONENT_LOADER); + } + + /** + * Interpret the {@code configurationModel} to create {@link OpenTelemetrySdk} instance + * corresponding to the configuration. 
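 * <p>A minimal usage sketch (assumes {@code in} is an {@code InputStream} of YAML declarative
 * configuration; the variable is not part of this diff):
 *
 * <pre>{@code
 * OpenTelemetryConfigurationModel model = DeclarativeConfiguration.parse(in);
 * OpenTelemetrySdk sdk = DeclarativeConfiguration.create(model);
 * }</pre>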
+ * + * @param configurationModel the configuration model + * @param componentLoader the component loader used to load {@link ComponentProvider} + * implementations + * @return the {@link OpenTelemetrySdk} + * @throws DeclarativeConfigException if unable to interpret + */ + public static OpenTelemetrySdk create( + OpenTelemetryConfigurationModel configurationModel, ComponentLoader componentLoader) { + SpiHelper spiHelper = SpiHelper.create(componentLoader); + + DeclarativeConfigurationBuilder builder = new DeclarativeConfigurationBuilder(); + + for (DeclarativeConfigurationCustomizerProvider provider : + spiHelper.loadOrdered(DeclarativeConfigurationCustomizerProvider.class)) { + provider.customize(builder); + } + + return createAndMaybeCleanup( + OpenTelemetryConfigurationFactory.getInstance(), + spiHelper, + builder.customizeModel(configurationModel)); + } + + /** + * Parse the {@code configuration} YAML and return the {@link OpenTelemetryConfigurationModel}. + * + *
<p>
Before parsing, environment variable substitution is performed as described in {@link + * EnvSubstitutionConstructor}. + * + * @throws DeclarativeConfigException if unable to parse + */ + public static OpenTelemetryConfigurationModel parse(InputStream configuration) { + try { + return parse(configuration, System.getenv()); + } catch (RuntimeException e) { + throw new DeclarativeConfigException("Unable to parse configuration input stream", e); + } + } + + // Visible for testing + static OpenTelemetryConfigurationModel parse( + InputStream configuration, Map environmentVariables) { + Object yamlObj = loadYaml(configuration, environmentVariables); + return MAPPER.convertValue(yamlObj, OpenTelemetryConfigurationModel.class); + } + + // Visible for testing + static Object loadYaml(InputStream inputStream, Map environmentVariables) { + LoadSettings settings = LoadSettings.builder().setSchema(new CoreSchema()).build(); + Load yaml = new Load(settings, new EnvSubstitutionConstructor(settings, environmentVariables)); + return yaml.loadFromInputStream(inputStream); + } + + /** + * Convert the {@code model} to a generic {@link DeclarativeConfigProperties}. + * + * @param model the configuration model + * @return a generic {@link DeclarativeConfigProperties} representation of the model + */ + public static DeclarativeConfigProperties toConfigProperties( + OpenTelemetryConfigurationModel model) { + return toConfigProperties(model, DEFAULT_COMPONENT_LOADER); + } + + /** + * Convert the {@code configuration} YAML to a generic {@link DeclarativeConfigProperties}. + * + * @param configuration configuration YAML + * @return a generic {@link DeclarativeConfigProperties} representation of the model + */ + public static DeclarativeConfigProperties toConfigProperties(InputStream configuration) { + Object yamlObj = loadYaml(configuration, System.getenv()); + return toConfigProperties(yamlObj, DEFAULT_COMPONENT_LOADER); + } + + static DeclarativeConfigProperties toConfigProperties( + Object model, ComponentLoader componentLoader) { + Map configurationMap = + MAPPER.convertValue(model, new TypeReference>() {}); + if (configurationMap == null) { + configurationMap = Collections.emptyMap(); + } + return YamlDeclarativeConfigProperties.create(configurationMap, componentLoader); + } + + /** + * Create a {@link SamplerModel} from the {@code samplerModel} representing the sampler config. + * + *
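 * <p>Illustrative sketch of calling this from a {@code ComponentProvider<Sampler>}; the
 * {@code "delegate"} property name and the {@code getStructured} accessor are assumptions for
 * illustration, not part of this diff:
 *
 * <pre>{@code
 * public Sampler create(DeclarativeConfigProperties config) {
 *   DeclarativeConfigProperties delegateProperties = config.getStructured("delegate");
 *   return DeclarativeConfiguration.createSampler(delegateProperties);
 * }
 * }</pre>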
<p>
This is used when samplers are composed, with one sampler accepting one or more additional + * samplers as config properties. The {@link ComponentProvider} implementation can call this to + * configure a delegate {@link SamplerModel} from the {@link DeclarativeConfigProperties} + * corresponding to a particular config property. + */ + // TODO(jack-berg): add create methods for all SDK extension components supported by + // ComponentProvider + public static Sampler createSampler(DeclarativeConfigProperties genericSamplerModel) { + YamlDeclarativeConfigProperties yamlDeclarativeConfigProperties = + requireYamlDeclarativeConfigProperties(genericSamplerModel); + SamplerModel samplerModel = convertToModel(yamlDeclarativeConfigProperties, SamplerModel.class); + return createAndMaybeCleanup( + SamplerFactory.getInstance(), + SpiHelper.create(yamlDeclarativeConfigProperties.getComponentLoader()), + samplerModel); + } + + private static YamlDeclarativeConfigProperties requireYamlDeclarativeConfigProperties( + DeclarativeConfigProperties declarativeConfigProperties) { + if (!(declarativeConfigProperties instanceof YamlDeclarativeConfigProperties)) { + throw new DeclarativeConfigException( + "Only YamlDeclarativeConfigProperties can be converted to model"); + } + return (YamlDeclarativeConfigProperties) declarativeConfigProperties; + } + + static T convertToModel( + YamlDeclarativeConfigProperties yamlDeclarativeConfigProperties, Class modelType) { + return MAPPER.convertValue(yamlDeclarativeConfigProperties.toMap(), modelType); + } + + static R createAndMaybeCleanup(Factory factory, SpiHelper spiHelper, M model) { + List closeables = new ArrayList<>(); + try { + return factory.create(model, spiHelper, closeables); + } catch (RuntimeException e) { + logger.info("Error encountered interpreting model. Closing partially configured components."); + for (Closeable closeable : closeables) { + try { + logger.fine("Closing " + closeable.getClass().getName()); + closeable.close(); + } catch (IOException ex) { + logger.warning( + "Error closing " + closeable.getClass().getName() + ": " + ex.getMessage()); + } + } + if (e instanceof DeclarativeConfigException) { + throw e; + } + throw new DeclarativeConfigException("Unexpected configuration error", e); + } + } + + /** + * {@link StandardConstructor} which substitutes environment variables. + * + *
<p>
Environment variables follow the syntax {@code ${VARIABLE}}, where {@code VARIABLE} is an + * environment variable matching the regular expression {@code [a-zA-Z_]+[a-zA-Z0-9_]*}. + * + *
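 * <p>For example (the variable name and default below are illustrative assumptions), the scalar
 * {@code ${OTEL_EXPORTER_OTLP_ENDPOINT:-http://localhost:4318}} resolves to the value of
 * {@code OTEL_EXPORTER_OTLP_ENDPOINT} when that variable is set, and to
 * {@code http://localhost:4318} otherwise, since {@code :-} introduces a default value.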
<p>
Environment variable substitution only takes place on scalar values of maps. References to + * environment variables in keys or sets are ignored. + * + *
<p>
If a referenced environment variable is not defined, it is replaced with {@code ""}. + */ + private static final class EnvSubstitutionConstructor extends StandardConstructor { + + // Load is not thread safe but this instance is always used on the same thread + private final Load load; + private final Map environmentVariables; + + private EnvSubstitutionConstructor( + LoadSettings loadSettings, Map environmentVariables) { + super(loadSettings); + load = new Load(loadSettings); + this.environmentVariables = environmentVariables; + } + + /** + * Implementation is same as {@link + * org.snakeyaml.engine.v2.constructor.BaseConstructor#constructMapping(MappingNode)} except we + * override the resolution of values with our custom {@link #constructValueObject(Node)}, which + * performs environment variable substitution. + */ + @Override + @SuppressWarnings({"ReturnValueIgnored", "CatchingUnchecked"}) + protected Map constructMapping(MappingNode node) { + Map mapping = settings.getDefaultMap().apply(node.getValue().size()); + List nodeValue = node.getValue(); + for (NodeTuple tuple : nodeValue) { + Node keyNode = tuple.getKeyNode(); + Object key = constructObject(keyNode); + if (key != null) { + try { + key.hashCode(); // check circular dependencies + } catch (Exception e) { + throw new ConstructorException( + "while constructing a mapping", + node.getStartMark(), + "found unacceptable key " + key, + tuple.getKeyNode().getStartMark(), + e); + } + } + Node valueNode = tuple.getValueNode(); + Object value = constructValueObject(valueNode); + if (keyNode.isRecursive()) { + if (settings.getAllowRecursiveKeys()) { + postponeMapFilling(mapping, key, value); + } else { + throw new YamlEngineException( + "Recursive key for mapping is detected but it is not configured to be allowed."); + } + } else { + mapping.put(key, value); + } + } + + return mapping; + } + + private Object constructValueObject(Node node) { + Object value = constructObject(node); + if (!(node instanceof ScalarNode)) { + return value; + } + if (!(value instanceof String)) { + return value; + } + + String val = (String) value; + Matcher matcher = ENV_VARIABLE_REFERENCE.matcher(val); + if (!matcher.find()) { + return value; + } + + int offset = 0; + StringBuilder newVal = new StringBuilder(); + ScalarStyle scalarStyle = ((ScalarNode) node).getScalarStyle(); + do { + MatchResult matchResult = matcher.toMatchResult(); + String envVarKey = matcher.group(1); + String defaultValue = matcher.group(3); + if (defaultValue == null) { + defaultValue = ""; + } + String replacement = environmentVariables.getOrDefault(envVarKey, defaultValue); + newVal.append(val, offset, matchResult.start()).append(replacement); + offset = matchResult.end(); + } while (matcher.find()); + if (offset != val.length()) { + newVal.append(val, offset, val.length()); + } + // If the value was double quoted, retain the double quotes so we don't change a value + // intended to be a string to a different type after environment variable substitution + if (scalarStyle == ScalarStyle.DOUBLE_QUOTED) { + newVal.insert(0, "\""); + newVal.append("\""); + } + return load.loadFromString(newVal.toString()); + } + } +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationBuilder.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationBuilder.java new file mode 100644 index 00000000000..c96aa629fe9 --- /dev/null +++ 
b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationBuilder.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; +import java.util.function.Function; + +/** Builder for the declarative configuration. */ +public class DeclarativeConfigurationBuilder implements DeclarativeConfigurationCustomizer { + private Function + modelCustomizer = Function.identity(); + + @Override + public void addModelCustomizer( + Function customizer) { + modelCustomizer = mergeCustomizer(modelCustomizer, customizer); + } + + private static Function mergeCustomizer( + Function first, Function second) { + return (I configured) -> { + O1 firstResult = first.apply(configured); + return second.apply(firstResult); + }; + } + + /** Customize the configuration model. */ + public OpenTelemetryConfigurationModel customizeModel( + OpenTelemetryConfigurationModel configurationModel) { + return modelCustomizer.apply(configurationModel); + } +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationCustomizer.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationCustomizer.java new file mode 100644 index 00000000000..3e8e327355c --- /dev/null +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationCustomizer.java @@ -0,0 +1,21 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; +import java.util.function.Function; + +/** A service provider interface (SPI) for customizing declarative configuration. */ +public interface DeclarativeConfigurationCustomizer { + /** + * Method invoked when configuring the SDK to allow further customization of the declarative + * configuration. + * + * @param customizer the customizer to add + */ + void addModelCustomizer( + Function customizer); +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationCustomizerProvider.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationCustomizerProvider.java new file mode 100644 index 00000000000..e5ec52c60d4 --- /dev/null +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationCustomizerProvider.java @@ -0,0 +1,19 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import io.opentelemetry.sdk.autoconfigure.spi.Ordered; + +/** A service provider interface (SPI) for customizing declarative configuration. */ +public interface DeclarativeConfigurationCustomizerProvider extends Ordered { + /** + * Method invoked when configuring the SDK to allow further customization of the declarative + * configuration. 
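 * <p>Sketch of an implementation (the class name is hypothetical, and the identity customizer is
 * a placeholder; a real implementation would modify and return the model):
 *
 * <pre>{@code
 * public class MyCustomizerProvider implements DeclarativeConfigurationCustomizerProvider {
 *   public void customize(DeclarativeConfigurationCustomizer customizer) {
 *     customizer.addModelCustomizer(model -> model);
 *   }
 * }
 * }</pre>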
+ * + * @param customizer the customizer to add + */ + void customize(DeclarativeConfigurationCustomizer customizer); +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/Factory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/Factory.java index 609fcd4f902..c5f5e7c59d3 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/Factory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/Factory.java @@ -8,7 +8,6 @@ import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; import java.io.Closeable; import java.util.List; -import javax.annotation.Nullable; interface Factory { @@ -20,5 +19,5 @@ interface Factory { * @param closeables mutable list of closeables created * @return the {@link ResultT} */ - ResultT create(@Nullable ModelT model, SpiHelper spiHelper, List closeables); + ResultT create(ModelT model, SpiHelper spiHelper, List closeables); } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/FileConfigUtil.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/FileConfigUtil.java index f1c882a48b9..a84143ede67 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/FileConfigUtil.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/FileConfigUtil.java @@ -5,8 +5,14 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; import java.io.Closeable; import java.util.List; +import java.util.function.Function; +import java.util.stream.Collectors; import javax.annotation.Nullable; final class FileConfigUtil { @@ -27,4 +33,74 @@ static T assertNotNull(@Nullable T object, String description) { } return object; } + + static T requireNonNull(@Nullable T object, String description) { + if (object == null) { + throw new DeclarativeConfigException(description + " is required but is null"); + } + return object; + } + + /** + * Find a registered {@link ComponentProvider} which {@link ComponentProvider#getType()} matching + * {@code type}, {@link ComponentProvider#getName()} matching {@code name}, and call {@link + * ComponentProvider#create(DeclarativeConfigProperties)} with the given {@code model}. + * + * @throws DeclarativeConfigException if no matching providers are found, or if multiple are found + * (i.e. conflict), or if {@link ComponentProvider#create(DeclarativeConfigProperties)} throws + */ + static T loadComponent(SpiHelper spiHelper, Class type, String name, Object model) { + // Map model to generic structured config properties + DeclarativeConfigProperties config = + DeclarativeConfiguration.toConfigProperties(model, spiHelper.getComponentLoader()); + return loadComponentHelper(spiHelper, type, name, config); + } + + /** + * Find a registered {@link ComponentProvider} with {@link ComponentProvider#getType()} matching + * {@code type}, {@link ComponentProvider#getName()} matching {@code name}, and call {@link + * ComponentProvider#create(DeclarativeConfigProperties)} with the given {@code config}. 
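 * <p>Sketch of a provider that such a lookup would match (the class name, component name, and
 * {@code CustomSpanExporter} are assumptions for illustration):
 *
 * <pre>{@code
 * public class CustomSpanExporterProvider implements ComponentProvider<SpanExporter> {
 *   public Class<SpanExporter> getType() { return SpanExporter.class; }
 *   public String getName() { return "custom"; }
 *   public SpanExporter create(DeclarativeConfigProperties config) {
 *     return new CustomSpanExporter();
 *   }
 * }
 * }</pre>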
+ * + * @throws DeclarativeConfigException if no matching providers are found, or if multiple are found + * (i.e. conflict), or if {@link ComponentProvider#create(DeclarativeConfigProperties)} throws + */ + @SuppressWarnings({"unchecked", "rawtypes"}) + private static T loadComponentHelper( + SpiHelper spiHelper, Class type, String name, DeclarativeConfigProperties config) { + // TODO(jack-berg): cache loaded component providers + List componentProviders = spiHelper.load(ComponentProvider.class); + List> matchedProviders = + componentProviders.stream() + .map( + (Function>) + componentProvider -> componentProvider) + .filter( + componentProvider -> + componentProvider.getType() == type && name.equals(componentProvider.getName())) + .collect(Collectors.toList()); + if (matchedProviders.isEmpty()) { + throw new DeclarativeConfigException( + "No component provider detected for " + type.getName() + " with name \"" + name + "\"."); + } + if (matchedProviders.size() > 1) { + throw new DeclarativeConfigException( + "Component provider conflict. Multiple providers detected for " + + type.getName() + + " with name \"" + + name + + "\": " + + componentProviders.stream() + .map(provider -> provider.getClass().getName()) + .collect(Collectors.joining(",", "[", "]"))); + } + // Exactly one matching component provider + ComponentProvider provider = (ComponentProvider) matchedProviders.get(0); + + try { + return provider.create(config); + } catch (Throwable throwable) { + throw new DeclarativeConfigException( + "Error configuring " + type.getName() + " with name \"" + name + "\"", throwable); + } + } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/InstrumentSelectorFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/InstrumentSelectorFactory.java index ec117228b64..d45d1ac3218 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/InstrumentSelectorFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/InstrumentSelectorFactory.java @@ -5,17 +5,16 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Selector; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SelectorModel; import io.opentelemetry.sdk.metrics.InstrumentSelector; import io.opentelemetry.sdk.metrics.InstrumentSelectorBuilder; import io.opentelemetry.sdk.metrics.InstrumentType; import java.io.Closeable; import java.util.List; -import javax.annotation.Nullable; -final class InstrumentSelectorFactory implements Factory { +final class InstrumentSelectorFactory implements Factory { private static final InstrumentSelectorFactory INSTANCE = new InstrumentSelectorFactory(); @@ -27,11 +26,7 @@ static InstrumentSelectorFactory getInstance() { @Override public InstrumentSelector create( - @Nullable Selector model, SpiHelper spiHelper, List closeables) { - if (model == null) { - throw new ConfigurationException("selector must not be null"); - } - + SelectorModel model, SpiHelper spiHelper, List closeables) { InstrumentSelectorBuilder builder = InstrumentSelector.builder(); if (model.getInstrumentName() != null) { 
builder.setName(model.getInstrumentName()); @@ -41,7 +36,7 @@ public InstrumentSelector create( try { instrumentType = InstrumentType.valueOf(model.getInstrumentType().name()); } catch (IllegalArgumentException e) { - throw new ConfigurationException( + throw new DeclarativeConfigException( "Unrecognized instrument type: " + model.getInstrumentType(), e); } builder.setType(instrumentType); @@ -59,7 +54,7 @@ public InstrumentSelector create( try { return builder.build(); } catch (IllegalArgumentException e) { - throw new ConfigurationException("Invalid selector", e); + throw new DeclarativeConfigException("Invalid selector", e); } } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogLimitsFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogLimitsFactory.java index d4a7dea5c16..a2f4ba2e6c4 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogLimitsFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogLimitsFactory.java @@ -6,13 +6,12 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimits; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimitsModel; import io.opentelemetry.sdk.logs.LogLimits; import io.opentelemetry.sdk.logs.LogLimitsBuilder; import java.io.Closeable; import java.util.List; -import javax.annotation.Nullable; final class LogLimitsFactory implements Factory { @@ -26,15 +25,10 @@ static LogLimitsFactory getInstance() { @Override public LogLimits create( - @Nullable LogRecordLimitsAndAttributeLimits model, - SpiHelper spiHelper, - List closeables) { - if (model == null) { - return LogLimits.getDefault(); - } + LogRecordLimitsAndAttributeLimits model, SpiHelper spiHelper, List closeables) { LogLimitsBuilder builder = LogLimits.builder(); - AttributeLimits attributeLimitsModel = model.getAttributeLimits(); + AttributeLimitsModel attributeLimitsModel = model.getAttributeLimits(); if (attributeLimitsModel != null) { if (attributeLimitsModel.getAttributeCountLimit() != null) { builder.setMaxNumberOfAttributes(attributeLimitsModel.getAttributeCountLimit()); @@ -44,7 +38,7 @@ public LogLimits create( } } - LogRecordLimits logRecordLimitsModel = model.getLogRecordLimits(); + LogRecordLimitsModel logRecordLimitsModel = model.getLogRecordLimits(); if (logRecordLimitsModel != null) { if (logRecordLimitsModel.getAttributeCountLimit() != null) { builder.setMaxNumberOfAttributes(logRecordLimitsModel.getAttributeCountLimit()); diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordExporterFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordExporterFactory.java index 3469ec61e36..1af81a7e7f6 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordExporterFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordExporterFactory.java @@ -7,24 +7,17 @@ import 
static java.util.stream.Collectors.joining; -import io.opentelemetry.sdk.autoconfigure.internal.NamedSpiManager; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ConsoleModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; import io.opentelemetry.sdk.logs.export.LogRecordExporter; import java.io.Closeable; -import java.util.HashMap; import java.util.List; import java.util.Map; -import javax.annotation.Nullable; -final class LogRecordExporterFactory - implements Factory< - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporter, - LogRecordExporter> { +final class LogRecordExporterFactory implements Factory { private static final LogRecordExporterFactory INSTANCE = new LogRecordExporterFactory(); @@ -36,79 +29,39 @@ static LogRecordExporterFactory getInstance() { @Override public LogRecordExporter create( - @Nullable - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporter - model, - SpiHelper spiHelper, - List closeables) { - if (model == null) { - return LogRecordExporter.composite(); - } - - Otlp otlpModel = model.getOtlp(); + LogRecordExporterModel model, SpiHelper spiHelper, List closeables) { + OtlpModel otlpModel = model.getOtlp(); if (otlpModel != null) { - return FileConfigUtil.addAndReturn(closeables, createOtlpExporter(otlpModel, spiHelper)); + model.getAdditionalProperties().put("otlp", otlpModel); } - // TODO(jack-berg): add support for generic SPI exporters - if (!model.getAdditionalProperties().isEmpty()) { - throw new ConfigurationException( - "Unrecognized log record exporter(s): " - + model.getAdditionalProperties().keySet().stream().collect(joining(",", "[", "]"))); + ConsoleModel consoleModel = model.getConsole(); + if (consoleModel != null) { + model.getAdditionalProperties().put("console", consoleModel); } - return LogRecordExporter.composite(); - } - - private static LogRecordExporter createOtlpExporter(Otlp otlp, SpiHelper spiHelper) { - // Translate from file configuration scheme to environment variable scheme. This is ultimately - // interpreted by Otlp*ExporterProviders, but we want to avoid the dependency on - // opentelemetry-exporter-otlp - Map properties = new HashMap<>(); - if (otlp.getProtocol() != null) { - properties.put("otel.exporter.otlp.logs.protocol", otlp.getProtocol()); - } - if (otlp.getEndpoint() != null) { - // NOTE: Set general otel.exporter.otlp.endpoint instead of signal specific - // otel.exporter.otlp.logs.endpoint to allow signal path (i.e. 
/v1/logs) to be added if not - // present - properties.put("otel.exporter.otlp.endpoint", otlp.getEndpoint()); - } - if (otlp.getHeaders() != null) { - properties.put( - "otel.exporter.otlp.logs.headers", - otlp.getHeaders().getAdditionalProperties().entrySet().stream() - .map(entry -> entry.getKey() + "=" + entry.getValue()) - .collect(joining(","))); - } - if (otlp.getCompression() != null) { - properties.put("otel.exporter.otlp.logs.compression", otlp.getCompression()); - } - if (otlp.getTimeout() != null) { - properties.put("otel.exporter.otlp.logs.timeout", Integer.toString(otlp.getTimeout())); - } - if (otlp.getCertificate() != null) { - properties.put("otel.exporter.otlp.logs.certificate", otlp.getCertificate()); - } - if (otlp.getClientKey() != null) { - properties.put("otel.exporter.otlp.logs.client.key", otlp.getClientKey()); - } - if (otlp.getClientCertificate() != null) { - properties.put("otel.exporter.otlp.logs.client.certificate", otlp.getClientCertificate()); + if (!model.getAdditionalProperties().isEmpty()) { + Map additionalProperties = model.getAdditionalProperties(); + if (additionalProperties.size() > 1) { + throw new DeclarativeConfigException( + "Invalid configuration - multiple log record exporters set: " + + additionalProperties.keySet().stream().collect(joining(",", "[", "]"))); + } + Map.Entry exporterKeyValue = + additionalProperties.entrySet().stream() + .findFirst() + .orElseThrow( + () -> + new IllegalStateException("Missing exporter. This is a programming error.")); + LogRecordExporter logRecordExporter = + FileConfigUtil.loadComponent( + spiHelper, + LogRecordExporter.class, + exporterKeyValue.getKey(), + exporterKeyValue.getValue()); + return FileConfigUtil.addAndReturn(closeables, logRecordExporter); + } else { + throw new DeclarativeConfigException("log exporter must be set"); } - - ConfigProperties configProperties = DefaultConfigProperties.createFromMap(properties); - return FileConfigUtil.assertNotNull( - logRecordExporterSpiManager(configProperties, spiHelper).getByName("otlp"), - "otlp exporter"); - } - - private static NamedSpiManager logRecordExporterSpiManager( - ConfigProperties config, SpiHelper spiHelper) { - return spiHelper.loadConfigurable( - ConfigurableLogRecordExporterProvider.class, - ConfigurableLogRecordExporterProvider::getName, - ConfigurableLogRecordExporterProvider::createExporter, - config); } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordLimitsAndAttributeLimits.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordLimitsAndAttributeLimits.java index ffcb42cdd48..bfea53b9a0e 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordLimitsAndAttributeLimits.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordLimitsAndAttributeLimits.java @@ -6,21 +6,21 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import com.google.auto.value.AutoValue; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimits; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimitsModel; import javax.annotation.Nullable; @AutoValue abstract class 
LogRecordLimitsAndAttributeLimits { static LogRecordLimitsAndAttributeLimits create( - @Nullable AttributeLimits attributeLimits, @Nullable LogRecordLimits spanLimits) { + @Nullable AttributeLimitsModel attributeLimits, @Nullable LogRecordLimitsModel spanLimits) { return new AutoValue_LogRecordLimitsAndAttributeLimits(attributeLimits, spanLimits); } @Nullable - abstract AttributeLimits getAttributeLimits(); + abstract AttributeLimitsModel getAttributeLimits(); @Nullable - abstract LogRecordLimits getLogRecordLimits(); + abstract LogRecordLimitsModel getLogRecordLimits(); } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordProcessorFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordProcessorFactory.java index 5971d8537a9..2bee5d9a8de 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordProcessorFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordProcessorFactory.java @@ -7,22 +7,24 @@ import static java.util.stream.Collectors.joining; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporter; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleLogRecordProcessorModel; import io.opentelemetry.sdk.logs.LogRecordProcessor; import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessorBuilder; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; import java.io.Closeable; import java.time.Duration; import java.util.List; -import javax.annotation.Nullable; +import java.util.Map; final class LogRecordProcessorFactory - implements Factory< - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessor, - LogRecordProcessor> { + implements Factory { private static final LogRecordProcessorFactory INSTANCE = new LogRecordProcessorFactory(); @@ -34,26 +36,16 @@ static LogRecordProcessorFactory getInstance() { @Override public LogRecordProcessor create( - @Nullable - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessor - model, - SpiHelper spiHelper, - List closeables) { - if (model == null) { - return LogRecordProcessor.composite(); - } - - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessor - batchModel = model.getBatch(); + LogRecordProcessorModel model, SpiHelper spiHelper, List closeables) { + BatchLogRecordProcessorModel batchModel = model.getBatch(); if (batchModel != null) { - LogRecordExporter exporterModel = batchModel.getExporter(); - if (exporterModel == null) { - return LogRecordProcessor.composite(); - } + LogRecordExporterModel exporterModel = + FileConfigUtil.requireNonNull( + batchModel.getExporter(), "batch log record processor exporter"); - 
BatchLogRecordProcessorBuilder builder = - BatchLogRecordProcessor.builder( - LogRecordExporterFactory.getInstance().create(exporterModel, spiHelper, closeables)); + LogRecordExporter logRecordExporter = + LogRecordExporterFactory.getInstance().create(exporterModel, spiHelper, closeables); + BatchLogRecordProcessorBuilder builder = BatchLogRecordProcessor.builder(logRecordExporter); if (batchModel.getExportTimeout() != null) { builder.setExporterTimeout(Duration.ofMillis(batchModel.getExportTimeout())); } @@ -69,27 +61,39 @@ public LogRecordProcessor create( return FileConfigUtil.addAndReturn(closeables, builder.build()); } - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleLogRecordProcessor - simpleModel = model.getSimple(); + SimpleLogRecordProcessorModel simpleModel = model.getSimple(); if (simpleModel != null) { - LogRecordExporter exporterModel = simpleModel.getExporter(); - if (exporterModel == null) { - return LogRecordProcessor.composite(); - } - + LogRecordExporterModel exporterModel = + FileConfigUtil.requireNonNull( + simpleModel.getExporter(), "simple log record processor exporter"); + LogRecordExporter logRecordExporter = + LogRecordExporterFactory.getInstance().create(exporterModel, spiHelper, closeables); return FileConfigUtil.addAndReturn( - closeables, - SimpleLogRecordProcessor.create( - LogRecordExporterFactory.getInstance().create(exporterModel, spiHelper, closeables))); + closeables, SimpleLogRecordProcessor.create(logRecordExporter)); } - // TODO: add support for generic log record processors if (!model.getAdditionalProperties().isEmpty()) { - throw new ConfigurationException( - "Unrecognized log record processor(s): " - + model.getAdditionalProperties().keySet().stream().collect(joining(",", "[", "]"))); + Map additionalProperties = model.getAdditionalProperties(); + if (additionalProperties.size() > 1) { + throw new DeclarativeConfigException( + "Invalid configuration - multiple log record processors set: " + + additionalProperties.keySet().stream().collect(joining(",", "[", "]"))); + } + Map.Entry processorKeyValue = + additionalProperties.entrySet().stream() + .findFirst() + .orElseThrow( + () -> + new IllegalStateException("Missing processor. 
This is a programming error.")); + LogRecordProcessor logRecordProcessor = + FileConfigUtil.loadComponent( + spiHelper, + LogRecordProcessor.class, + processorKeyValue.getKey(), + processorKeyValue.getValue()); + return FileConfigUtil.addAndReturn(closeables, logRecordProcessor); + } else { + throw new DeclarativeConfigException("log processor must be set"); } - - return LogRecordProcessor.composite(); } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderAndAttributeLimits.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderAndAttributeLimits.java index 43263c0972b..06abbb623f3 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderAndAttributeLimits.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderAndAttributeLimits.java @@ -6,21 +6,22 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import com.google.auto.value.AutoValue; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProviderModel; import javax.annotation.Nullable; @AutoValue abstract class LoggerProviderAndAttributeLimits { static LoggerProviderAndAttributeLimits create( - @Nullable AttributeLimits attributeLimits, @Nullable LoggerProvider loggerProvider) { + @Nullable AttributeLimitsModel attributeLimits, + @Nullable LoggerProviderModel loggerProvider) { return new AutoValue_LoggerProviderAndAttributeLimits(attributeLimits, loggerProvider); } @Nullable - abstract AttributeLimits getAttributeLimits(); + abstract AttributeLimitsModel getAttributeLimits(); @Nullable - abstract LoggerProvider getLoggerProvider(); + abstract LoggerProviderModel getLoggerProvider(); } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderFactory.java index 36bbce54566..ceb208697a2 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderFactory.java @@ -6,14 +6,13 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProviderModel; import io.opentelemetry.sdk.logs.LogLimits; import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder; import java.io.Closeable; import java.util.List; -import javax.annotation.Nullable; final class LoggerProviderFactory implements Factory { @@ -28,14 +27,10 @@ static LoggerProviderFactory getInstance() { 
@Override public SdkLoggerProviderBuilder create( - @Nullable LoggerProviderAndAttributeLimits model, - SpiHelper spiHelper, - List closeables) { + LoggerProviderAndAttributeLimits model, SpiHelper spiHelper, List closeables) { SdkLoggerProviderBuilder builder = SdkLoggerProvider.builder(); - if (model == null) { - return builder; - } - LoggerProvider loggerProviderModel = model.getLoggerProvider(); + + LoggerProviderModel loggerProviderModel = model.getLoggerProvider(); if (loggerProviderModel == null) { return builder; } @@ -49,7 +44,7 @@ public SdkLoggerProviderBuilder create( closeables); builder.setLogLimits(() -> logLimits); - List processors = loggerProviderModel.getProcessors(); + List processors = loggerProviderModel.getProcessors(); if (processors != null) { processors.forEach( processor -> diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MeterProviderFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MeterProviderFactory.java index 0acb6c703f0..864277b8398 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MeterProviderFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MeterProviderFactory.java @@ -5,17 +5,21 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; +import static io.opentelemetry.sdk.extension.incubator.fileconfig.FileConfigUtil.requireNonNull; + import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MeterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.View; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MeterProviderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SelectorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.StreamModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ViewModel; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; +import io.opentelemetry.sdk.metrics.export.MetricReader; import java.io.Closeable; import java.util.List; -import javax.annotation.Nullable; -final class MeterProviderFactory implements Factory { +final class MeterProviderFactory implements Factory { private static final MeterProviderFactory INSTANCE = new MeterProviderFactory(); @@ -27,18 +31,14 @@ static MeterProviderFactory getInstance() { @Override public SdkMeterProviderBuilder create( - @Nullable MeterProvider model, SpiHelper spiHelper, List closeables) { - if (model == null) { - return SdkMeterProvider.builder(); - } - + MeterProviderModel model, SpiHelper spiHelper, List closeables) { SdkMeterProviderBuilder builder = SdkMeterProvider.builder(); - List readerModels = model.getReaders(); + List readerModels = model.getReaders(); if (readerModels != null) { readerModels.forEach( readerModel -> { - io.opentelemetry.sdk.metrics.export.MetricReader metricReader = + MetricReader metricReader = MetricReaderFactory.getInstance().create(readerModel, spiHelper, closeables); if (metricReader != null) { builder.registerMetricReader(metricReader); @@ -46,14 +46,16 @@ public 
SdkMeterProviderBuilder create( }); } - List viewModels = model.getViews(); + List viewModels = model.getViews(); if (viewModels != null) { viewModels.forEach( - viewModel -> - builder.registerView( - InstrumentSelectorFactory.getInstance() - .create(viewModel.getSelector(), spiHelper, closeables), - ViewFactory.getInstance().create(viewModel.getStream(), spiHelper, closeables))); + viewModel -> { + SelectorModel selector = requireNonNull(viewModel.getSelector(), "view selector"); + StreamModel stream = requireNonNull(viewModel.getStream(), "view stream"); + builder.registerView( + InstrumentSelectorFactory.getInstance().create(selector, spiHelper, closeables), + ViewFactory.getInstance().create(stream, spiHelper, closeables)); + }); } return builder; diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricExporterFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricExporterFactory.java index 7d187f8a057..0b5e12aab8e 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricExporterFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricExporterFactory.java @@ -7,25 +7,16 @@ import static java.util.stream.Collectors.joining; -import io.opentelemetry.sdk.autoconfigure.internal.NamedSpiManager; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetric; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetricModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PushMetricExporterModel; import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.io.Closeable; -import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; -import javax.annotation.Nullable; -final class MetricExporterFactory - implements Factory< - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricExporter, - MetricExporter> { +final class MetricExporterFactory implements Factory { private static final MetricExporterFactory INSTANCE = new MetricExporterFactory(); @@ -35,107 +26,40 @@ static MetricExporterFactory getInstance() { return INSTANCE; } - @SuppressWarnings("NullAway") // Override superclass non-null response @Override - @Nullable public MetricExporter create( - @Nullable - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricExporter model, - SpiHelper spiHelper, - List closeables) { - if (model == null) { - return null; - } - - OtlpMetric otlpModel = model.getOtlp(); + PushMetricExporterModel model, SpiHelper spiHelper, List closeables) { + OtlpMetricModel otlpModel = model.getOtlp(); if (otlpModel != null) { - return FileConfigUtil.addAndReturn(closeables, createOtlpExporter(otlpModel, spiHelper)); + model.getAdditionalProperties().put("otlp", otlpModel); } if (model.getConsole() != null) { - return FileConfigUtil.addAndReturn(closeables, createConsoleExporter(spiHelper)); - } - - if 
(model.getPrometheus() != null) { - throw new ConfigurationException("prometheus exporter not supported in this context"); + model.getAdditionalProperties().put("console", model.getConsole()); } - // TODO(jack-berg): add support for generic SPI exporters if (!model.getAdditionalProperties().isEmpty()) { - throw new ConfigurationException( - "Unrecognized metric exporter(s): " - + model.getAdditionalProperties().keySet().stream().collect(joining(",", "[", "]"))); - } - - return null; - } - - private static MetricExporter createOtlpExporter(OtlpMetric model, SpiHelper spiHelper) { - // Translate from file configuration scheme to environment variable scheme. This is ultimately - // interpreted by Otlp*ExporterProviders, but we want to avoid the dependency on - // opentelemetry-exporter-otlp - Map properties = new HashMap<>(); - if (model.getProtocol() != null) { - properties.put("otel.exporter.otlp.metrics.protocol", model.getProtocol()); - } - if (model.getEndpoint() != null) { - // NOTE: Set general otel.exporter.otlp.endpoint instead of signal specific - // otel.exporter.otlp.metrics.endpoint to allow signal path (i.e. /v1/metrics) to be added - // if not - // present - properties.put("otel.exporter.otlp.endpoint", model.getEndpoint()); - } - if (model.getHeaders() != null) { - properties.put( - "otel.exporter.otlp.metrics.headers", - model.getHeaders().getAdditionalProperties().entrySet().stream() - .map(entry -> entry.getKey() + "=" + entry.getValue()) - .collect(joining(","))); - } - if (model.getCompression() != null) { - properties.put("otel.exporter.otlp.metrics.compression", model.getCompression()); + Map additionalProperties = model.getAdditionalProperties(); + if (additionalProperties.size() > 1) { + throw new DeclarativeConfigException( + "Invalid configuration - multiple metric exporters set: " + + additionalProperties.keySet().stream().collect(joining(",", "[", "]"))); + } + Map.Entry exporterKeyValue = + additionalProperties.entrySet().stream() + .findFirst() + .orElseThrow( + () -> + new IllegalStateException("Missing exporter. 
This is a programming error.")); + MetricExporter metricExporter = + FileConfigUtil.loadComponent( + spiHelper, + MetricExporter.class, + exporterKeyValue.getKey(), + exporterKeyValue.getValue()); + return FileConfigUtil.addAndReturn(closeables, metricExporter); + } else { + throw new DeclarativeConfigException("metric exporter must be set"); } - if (model.getTimeout() != null) { - properties.put("otel.exporter.otlp.metrics.timeout", Integer.toString(model.getTimeout())); - } - if (model.getCertificate() != null) { - properties.put("otel.exporter.otlp.metrics.certificate", model.getCertificate()); - } - if (model.getClientKey() != null) { - properties.put("otel.exporter.otlp.metrics.client.key", model.getClientKey()); - } - if (model.getClientCertificate() != null) { - properties.put("otel.exporter.otlp.metrics.client.certificate", model.getClientCertificate()); - } - if (model.getDefaultHistogramAggregation() != null) { - properties.put( - "otel.exporter.otlp.metrics.default.histogram.aggregation", - model.getDefaultHistogramAggregation().value()); - } - if (model.getTemporalityPreference() != null) { - properties.put( - "otel.exporter.otlp.metrics.temporality.preference", model.getTemporalityPreference()); - } - - ConfigProperties configProperties = DefaultConfigProperties.createFromMap(properties); - return FileConfigUtil.assertNotNull( - metricExporterSpiManager(configProperties, spiHelper).getByName("otlp"), "otlp exporter"); - } - - private static MetricExporter createConsoleExporter(SpiHelper spiHelper) { - return FileConfigUtil.assertNotNull( - metricExporterSpiManager( - DefaultConfigProperties.createFromMap(Collections.emptyMap()), spiHelper) - .getByName("logging"), - "logging exporter"); - } - - private static NamedSpiManager metricExporterSpiManager( - ConfigProperties config, SpiHelper spiHelper) { - return spiHelper.loadConfigurable( - ConfigurableMetricExporterProvider.class, - ConfigurableMetricExporterProvider::getName, - ConfigurableMetricExporterProvider::createExporter, - config); } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricReaderFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricReaderFactory.java index bcaafd6c6e3..db6227c617c 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricReaderFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricReaderFactory.java @@ -5,29 +5,25 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; -import io.opentelemetry.sdk.autoconfigure.internal.NamedSpiManager; +import static io.opentelemetry.sdk.extension.incubator.fileconfig.FileConfigUtil.requireNonNull; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.internal.ConfigurableMetricReaderProvider; -import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Prometheus; -import 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricReader; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PrometheusModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PushMetricExporterModel; +import io.opentelemetry.sdk.metrics.export.MetricExporter; import io.opentelemetry.sdk.metrics.export.MetricReader; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; import io.opentelemetry.sdk.metrics.export.PeriodicMetricReaderBuilder; import java.io.Closeable; import java.time.Duration; -import java.util.HashMap; import java.util.List; -import java.util.Map; -import javax.annotation.Nullable; -final class MetricReaderFactory - implements Factory< - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReader, - MetricReader> { +final class MetricReaderFactory implements Factory { private static final MetricReaderFactory INSTANCE = new MetricReaderFactory(); @@ -37,79 +33,39 @@ static MetricReaderFactory getInstance() { return INSTANCE; } - @SuppressWarnings("NullAway") // Override superclass non-null response @Override - @Nullable public MetricReader create( - @Nullable - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReader model, - SpiHelper spiHelper, - List closeables) { - if (model == null) { - return null; - } - - PeriodicMetricReader periodicModel = model.getPeriodic(); + MetricReaderModel model, SpiHelper spiHelper, List closeables) { + PeriodicMetricReaderModel periodicModel = model.getPeriodic(); if (periodicModel != null) { - MetricExporter exporterModel = periodicModel.getExporter(); - if (exporterModel == null) { - throw new ConfigurationException("exporter required for periodic reader"); - } - io.opentelemetry.sdk.metrics.export.MetricExporter metricExporter = + PushMetricExporterModel exporterModel = + requireNonNull(periodicModel.getExporter(), "periodic metric reader exporter"); + MetricExporter metricExporter = MetricExporterFactory.getInstance().create(exporterModel, spiHelper, closeables); - if (metricExporter == null) { - return null; - } PeriodicMetricReaderBuilder builder = - io.opentelemetry.sdk.metrics.export.PeriodicMetricReader.builder( - FileConfigUtil.addAndReturn(closeables, metricExporter)); + PeriodicMetricReader.builder(FileConfigUtil.addAndReturn(closeables, metricExporter)); if (periodicModel.getInterval() != null) { builder.setInterval(Duration.ofMillis(periodicModel.getInterval())); } return FileConfigUtil.addAndReturn(closeables, builder.build()); } - PullMetricReader pullModel = model.getPull(); + PullMetricReaderModel pullModel = model.getPull(); if (pullModel != null) { - MetricExporter exporterModel = pullModel.getExporter(); - if (exporterModel == null) { - throw new ConfigurationException("exporter required for pull reader"); - } - Prometheus prometheusModel = exporterModel.getPrometheus(); + PullMetricExporterModel exporterModel = + requireNonNull(pullModel.getExporter(), "pull metric reader exporter"); + PrometheusModel prometheusModel = exporterModel.getPrometheus(); if (prometheusModel != null) { - // Translate from file 
configuration scheme to environment variable scheme. This is - // ultimately - // interpreted by PrometheusMetricReaderProvider, but we want to avoid the dependency on - // opentelemetry-exporter-prometheus - Map properties = new HashMap<>(); - if (prometheusModel.getHost() != null) { - properties.put("otel.exporter.prometheus.host", prometheusModel.getHost()); - } - if (prometheusModel.getPort() != null) { - properties.put( - "otel.exporter.prometheus.port", String.valueOf(prometheusModel.getPort())); - } - - ConfigProperties configProperties = DefaultConfigProperties.createFromMap(properties); - return FileConfigUtil.addAndReturn( - closeables, - FileConfigUtil.assertNotNull( - metricReaderSpiManager(configProperties, spiHelper).getByName("prometheus"), - "prometheus reader")); + MetricReader metricReader = + FileConfigUtil.loadComponent( + spiHelper, MetricReader.class, "prometheus", prometheusModel); + return FileConfigUtil.addAndReturn(closeables, metricReader); } - throw new ConfigurationException("prometheus is the only currently supported pull reader"); + throw new DeclarativeConfigException( + "prometheus is the only currently supported pull reader"); } - return null; - } - - private static NamedSpiManager - metricReaderSpiManager(ConfigProperties config, SpiHelper spiHelper) { - return spiHelper.loadConfigurable( - ConfigurableMetricReaderProvider.class, - ConfigurableMetricReaderProvider::getName, - ConfigurableMetricReaderProvider::createMetricReader, - config); + throw new DeclarativeConfigException("reader must be set"); } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/OpenTelemetryConfigurationFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/OpenTelemetryConfigurationFactory.java index 17c68f9ada2..8f19de2b721 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/OpenTelemetryConfigurationFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/OpenTelemetryConfigurationFactory.java @@ -5,19 +5,18 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.OpenTelemetrySdkBuilder; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfiguration; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; import io.opentelemetry.sdk.resources.Resource; import java.io.Closeable; import java.util.List; import java.util.Objects; -import javax.annotation.Nullable; final class OpenTelemetryConfigurationFactory - implements Factory { + implements Factory { private static final OpenTelemetryConfigurationFactory INSTANCE = new OpenTelemetryConfigurationFactory(); @@ -30,25 +29,26 @@ static OpenTelemetryConfigurationFactory getInstance() { @Override public OpenTelemetrySdk create( - @Nullable OpenTelemetryConfiguration model, SpiHelper spiHelper, List closeables) { + OpenTelemetryConfigurationModel model, SpiHelper spiHelper, List closeables) { OpenTelemetrySdkBuilder builder = OpenTelemetrySdk.builder(); - if (model == null) { - return FileConfigUtil.addAndReturn(closeables, builder.build()); - } - - if 
(!"0.1".equals(model.getFileFormat())) { - throw new ConfigurationException("Unsupported file format. Supported formats include: 0.1"); + if (!"0.3".equals(model.getFileFormat())) { + throw new DeclarativeConfigException( + "Unsupported file format. Supported formats include: 0.3"); } if (Objects.equals(Boolean.TRUE, model.getDisabled())) { return builder.build(); } - builder.setPropagators( - PropagatorsFactory.getInstance().create(model.getPropagators(), spiHelper, closeables)); + if (model.getPropagator() != null) { + builder.setPropagators( + PropagatorFactory.getInstance().create(model.getPropagator(), spiHelper, closeables)); + } - Resource resource = - ResourceFactory.getInstance().create(model.getResource(), spiHelper, closeables); + Resource resource = Resource.getDefault(); + if (model.getResource() != null) { + resource = ResourceFactory.getInstance().create(model.getResource(), spiHelper, closeables); + } if (model.getLoggerProvider() != null) { builder.setLoggerProvider( diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorFactory.java new file mode 100644 index 00000000000..c53b6699062 --- /dev/null +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorFactory.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import static io.opentelemetry.sdk.extension.incubator.fileconfig.FileConfigUtil.requireNonNull; + +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PropagatorModel; +import java.io.Closeable; +import java.util.List; + +final class PropagatorFactory implements Factory { + + private static final PropagatorFactory INSTANCE = new PropagatorFactory(); + + private PropagatorFactory() {} + + static PropagatorFactory getInstance() { + return INSTANCE; + } + + @Override + public ContextPropagators create( + PropagatorModel model, SpiHelper spiHelper, List closeables) { + List compositeModel = requireNonNull(model.getComposite(), "composite propagator"); + TextMapPropagator textMapPropagator = + TextMapPropagatorFactory.getInstance().create(compositeModel, spiHelper, closeables); + return ContextPropagators.create(textMapPropagator); + } +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorsFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorsFactory.java deleted file mode 100644 index cbd6c82bbc0..00000000000 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorsFactory.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.extension.incubator.fileconfig; - -import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; -import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; -import io.opentelemetry.context.propagation.ContextPropagators; -import io.opentelemetry.context.propagation.TextMapPropagator; -import 
io.opentelemetry.sdk.autoconfigure.internal.NamedSpiManager; -import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurablePropagatorProvider; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; -import java.io.Closeable; -import java.util.Arrays; -import java.util.Collections; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Set; -import javax.annotation.Nullable; - -final class PropagatorsFactory implements Factory, ContextPropagators> { - - private static final PropagatorsFactory INSTANCE = new PropagatorsFactory(); - - private PropagatorsFactory() {} - - static PropagatorsFactory getInstance() { - return INSTANCE; - } - - @Override - public ContextPropagators create( - @Nullable List model, SpiHelper spiHelper, List closeables) { - if (model == null || model.isEmpty()) { - model = Arrays.asList("tracecontext", "baggage"); - } - - if (model.contains("none")) { - if (model.size() > 1) { - throw new ConfigurationException( - "propagators contains \"none\" along with other propagators"); - } - return ContextPropagators.noop(); - } - - NamedSpiManager spiPropagatorsManager = - spiHelper.loadConfigurable( - ConfigurablePropagatorProvider.class, - ConfigurablePropagatorProvider::getName, - ConfigurablePropagatorProvider::getPropagator, - DefaultConfigProperties.createFromMap(Collections.emptyMap())); - Set propagators = new LinkedHashSet<>(); - for (String propagator : model) { - propagators.add(getPropagator(propagator, spiPropagatorsManager)); - } - - return ContextPropagators.create(TextMapPropagator.composite(propagators)); - } - - private static TextMapPropagator getPropagator( - String name, NamedSpiManager spiPropagatorsManager) { - if (name.equals("tracecontext")) { - return W3CTraceContextPropagator.getInstance(); - } - if (name.equals("baggage")) { - return W3CBaggagePropagator.getInstance(); - } - - TextMapPropagator spiPropagator = spiPropagatorsManager.getByName(name); - if (spiPropagator != null) { - return spiPropagator; - } - throw new ConfigurationException("Unrecognized value for otel.propagators: " + name); - } -} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ResourceFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ResourceFactory.java index 47ec3b6606a..0b240007d3f 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ResourceFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ResourceFactory.java @@ -5,15 +5,31 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; +import static io.opentelemetry.sdk.internal.GlobUtil.toGlobPatternPredicate; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.ResourceConfiguration; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Attributes; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Resource; +import io.opentelemetry.sdk.autoconfigure.spi.Ordered; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import 
io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeNameValueModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.DetectorAttributesModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.DetectorsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ResourceModel; +import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.ResourceBuilder; import java.io.Closeable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; import java.util.List; +import java.util.function.Predicate; +import java.util.stream.Collectors; import javax.annotation.Nullable; -final class ResourceFactory implements Factory { +final class ResourceFactory implements Factory { private static final ResourceFactory INSTANCE = new ResourceFactory(); @@ -24,20 +40,145 @@ static ResourceFactory getInstance() { } @Override - public io.opentelemetry.sdk.resources.Resource create( - @Nullable Resource model, SpiHelper spiHelper, List closeables) { - if (model == null) { - return io.opentelemetry.sdk.resources.Resource.getDefault(); - } + public Resource create(ResourceModel model, SpiHelper spiHelper, List closeables) { + ResourceBuilder builder = Resource.getDefault().toBuilder(); - ResourceBuilder builder = io.opentelemetry.sdk.resources.Resource.getDefault().toBuilder(); + ResourceBuilder detectedResourceBuilder = Resource.builder(); + List resourceDetectorResources = loadFromResourceDetectors(spiHelper); + for (Resource resourceProviderResource : resourceDetectorResources) { + detectedResourceBuilder.putAll(resourceProviderResource); + } + Predicate detectorAttributeFilter = detectorAttributeFilter(model.getDetectors()); + builder + .putAll( + detectedResourceBuilder.build().getAttributes().toBuilder() + .removeIf(attributeKey -> !detectorAttributeFilter.test(attributeKey.getKey())) + .build()) + .build(); - Attributes attributesModel = model.getAttributes(); - if (attributesModel != null) { + String attributeList = model.getAttributesList(); + if (attributeList != null) { builder.putAll( - AttributesFactory.getInstance().create(attributesModel, spiHelper, closeables)); + ResourceConfiguration.createEnvironmentResource( + DefaultConfigProperties.createFromMap( + Collections.singletonMap("otel.resource.attributes", attributeList)))); + } + + List attributeNameValueModel = model.getAttributes(); + if (attributeNameValueModel != null) { + builder + .putAll( + AttributeListFactory.getInstance() + .create(attributeNameValueModel, spiHelper, closeables)) + .build(); } return builder.build(); } + + /** + * Load resources from resource detectors, in order of lowest priority to highest priority. + * + *

In declarative configuration, a resource detector is a {@link ComponentProvider} with {@link + * ComponentProvider#getType()} set to {@link Resource}. Unlike other {@link ComponentProvider}s, + * the resource detector version does not use {@link ComponentProvider#getName()} (except for + * debug messages), and {@link ComponentProvider#create(DeclarativeConfigProperties)} is called + * with an empty instance. Additionally, the {@link Ordered#order()} value is respected for + * resource detectors which implement {@link Ordered}. + */ + @SuppressWarnings("rawtypes") + private static List loadFromResourceDetectors(SpiHelper spiHelper) { + List componentProviders = spiHelper.load(ComponentProvider.class); + List resourceAndOrders = new ArrayList<>(); + for (ComponentProvider componentProvider : componentProviders) { + if (componentProvider.getType() != Resource.class) { + continue; + } + Resource resource; + try { + resource = (Resource) componentProvider.create(DeclarativeConfigProperties.empty()); + } catch (Throwable throwable) { + throw new DeclarativeConfigException( + "Error configuring " + + Resource.class.getName() + + " with name \"" + + componentProvider.getName() + + "\"", + throwable); + } + int order = + (componentProvider instanceof Ordered) ? ((Ordered) componentProvider).order() : 0; + resourceAndOrders.add(new ResourceAndOrder(resource, order)); + } + resourceAndOrders.sort(Comparator.comparing(ResourceAndOrder::order)); + return resourceAndOrders.stream().map(ResourceAndOrder::resource).collect(Collectors.toList()); + } + + private static final class ResourceAndOrder { + private final Resource resource; + private final int order; + + private ResourceAndOrder(Resource resource, int order) { + this.resource = resource; + this.order = order; + } + + private Resource resource() { + return resource; + } + + private int order() { + return order; + } + } + + private static boolean matchAll(String attributeKey) { + return true; + } + + private static Predicate detectorAttributeFilter( + @Nullable DetectorsModel detectorsModel) { + if (detectorsModel == null) { + return ResourceFactory::matchAll; + } + DetectorAttributesModel detectorAttributesModel = detectorsModel.getAttributes(); + if (detectorAttributesModel == null) { + return ResourceFactory::matchAll; + } + List included = detectorAttributesModel.getIncluded(); + List excluded = detectorAttributesModel.getExcluded(); + if (included == null && excluded == null) { + return ResourceFactory::matchAll; + } + if (included == null) { + return excludedPredicate(excluded); + } + if (excluded == null) { + return includedPredicate(included); + } + return includedPredicate(included).and(excludedPredicate(excluded)); + } + + /** + * Returns a predicate which matches strings matching any of the {@code included} glob patterns. + */ + private static Predicate includedPredicate(List included) { + Predicate result = attributeKey -> false; + for (String include : included) { + result = result.or(toGlobPatternPredicate(include)); + } + return result; + } + + /** + * Returns a predicate which matches strings NOT matching any of the {@code excluded} glob + * patterns. 
+ */ + private static Predicate excludedPredicate(List excluded) { + Predicate result = attributeKey -> true; + for (String exclude : excluded) { + result = result.and(toGlobPatternPredicate(exclude).negate()); + } + return result; + } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SamplerFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SamplerFactory.java index 8d881c2d03e..491355418ec 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SamplerFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SamplerFactory.java @@ -7,27 +7,19 @@ import static java.util.stream.Collectors.joining; -import io.opentelemetry.sdk.autoconfigure.internal.NamedSpiManager; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSamplerProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.JaegerRemote; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ParentBased; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TraceIdRatioBased; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.JaegerRemoteModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ParentBasedModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SamplerModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TraceIdRatioBasedModel; import io.opentelemetry.sdk.trace.samplers.ParentBasedSamplerBuilder; import io.opentelemetry.sdk.trace.samplers.Sampler; import java.io.Closeable; -import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; -import javax.annotation.Nullable; -final class SamplerFactory - implements Factory< - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Sampler, Sampler> { +final class SamplerFactory implements Factory { private static final SamplerFactory INSTANCE = new SamplerFactory(); @@ -38,21 +30,14 @@ static SamplerFactory getInstance() { } @Override - public Sampler create( - @Nullable io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Sampler model, - SpiHelper spiHelper, - List closeables) { - if (model == null) { - return Sampler.parentBased(Sampler.alwaysOn()); - } - + public Sampler create(SamplerModel model, SpiHelper spiHelper, List closeables) { if (model.getAlwaysOn() != null) { return Sampler.alwaysOn(); } if (model.getAlwaysOff() != null) { return Sampler.alwaysOff(); } - TraceIdRatioBased traceIdRatioBasedModel = model.getTraceIdRatioBased(); + TraceIdRatioBasedModel traceIdRatioBasedModel = model.getTraceIdRatioBased(); if (traceIdRatioBasedModel != null) { Double ratio = traceIdRatioBasedModel.getRatio(); if (ratio == null) { @@ -60,7 +45,7 @@ public Sampler create( } return Sampler.traceIdRatioBased(ratio); } - ParentBased parentBasedModel = model.getParentBased(); + ParentBasedModel parentBasedModel = model.getParentBased(); if (parentBasedModel != null) { Sampler root = 
parentBasedModel.getRoot() == null @@ -68,70 +53,49 @@ public Sampler create( : create(parentBasedModel.getRoot(), spiHelper, closeables); ParentBasedSamplerBuilder builder = Sampler.parentBasedBuilder(root); if (parentBasedModel.getRemoteParentSampled() != null) { - builder.setRemoteParentSampled( - create(parentBasedModel.getRemoteParentSampled(), spiHelper, closeables)); + Sampler sampler = create(parentBasedModel.getRemoteParentSampled(), spiHelper, closeables); + builder.setRemoteParentSampled(sampler); } if (parentBasedModel.getRemoteParentNotSampled() != null) { - builder.setRemoteParentNotSampled( - create(parentBasedModel.getRemoteParentNotSampled(), spiHelper, closeables)); + Sampler sampler = + create(parentBasedModel.getRemoteParentNotSampled(), spiHelper, closeables); + builder.setRemoteParentNotSampled(sampler); } if (parentBasedModel.getLocalParentSampled() != null) { - builder.setLocalParentSampled( - create(parentBasedModel.getLocalParentSampled(), spiHelper, closeables)); + Sampler sampler = create(parentBasedModel.getLocalParentSampled(), spiHelper, closeables); + builder.setLocalParentSampled(sampler); } if (parentBasedModel.getLocalParentNotSampled() != null) { - builder.setLocalParentNotSampled( - create(parentBasedModel.getLocalParentNotSampled(), spiHelper, closeables)); + Sampler sampler = + create(parentBasedModel.getLocalParentNotSampled(), spiHelper, closeables); + builder.setLocalParentNotSampled(sampler); } return builder.build(); } - JaegerRemote jaegerRemoteModel = model.getJaegerRemote(); + JaegerRemoteModel jaegerRemoteModel = model.getJaegerRemote(); if (jaegerRemoteModel != null) { - // Translate from file configuration scheme to environment variable scheme. This is ultimately - // interpreted by JaegerRemoteSamplerProvider, but we want to avoid the dependency on - // opentelemetry-sdk-extension-jaeger-remote-sampler - Map properties = new HashMap<>(); - if (jaegerRemoteModel.getEndpoint() != null) { - properties.put("endpoint", jaegerRemoteModel.getEndpoint()); - } - if (jaegerRemoteModel.getInterval() != null) { - properties.put("pollingInterval", String.valueOf(jaegerRemoteModel.getInterval())); - } - // TODO(jack-berg): determine how to support initial sampler. This is first case where a - // component configured via SPI has property that isn't available in the environment variable - // scheme. 
-      String otelTraceSamplerArg =
-          properties.entrySet().stream()
-              .map(entry -> entry.getKey() + "=" + entry.getValue())
-              .collect(joining(","));
-
-      ConfigProperties configProperties =
-          DefaultConfigProperties.createFromMap(
-              Collections.singletonMap("otel.traces.sampler.arg", otelTraceSamplerArg));
-      return FileConfigUtil.addAndReturn(
-          closeables,
-          FileConfigUtil.assertNotNull(
-              samplerSpiManager(configProperties, spiHelper).getByName("jaeger_remote"),
-              "jaeger remote sampler"));
+      model.getAdditionalProperties().put("jaeger_remote", jaegerRemoteModel);
     }
 
-    // TODO(jack-berg): add support for generic SPI samplers
     if (!model.getAdditionalProperties().isEmpty()) {
-      throw new ConfigurationException(
-          "Unrecognized sampler(s): "
-              + model.getAdditionalProperties().keySet().stream().collect(joining(",", "[", "]")));
+      Map<String, Object> additionalProperties = model.getAdditionalProperties();
+      if (additionalProperties.size() > 1) {
+        throw new DeclarativeConfigException(
+            "Invalid configuration - multiple samplers set: "
+                + additionalProperties.keySet().stream().collect(joining(",", "[", "]")));
+      }
+      Map.Entry<String, Object> exporterKeyValue =
+          additionalProperties.entrySet().stream()
+              .findFirst()
+              .orElseThrow(
+                  () -> new IllegalStateException("Missing sampler. This is a programming error."));
+      Sampler sampler =
+          FileConfigUtil.loadComponent(
+              spiHelper, Sampler.class, exporterKeyValue.getKey(), exporterKeyValue.getValue());
+      return FileConfigUtil.addAndReturn(closeables, sampler);
+    } else {
+      throw new DeclarativeConfigException("sampler must be set");
     }
-
-    return Sampler.parentBased(Sampler.alwaysOn());
-  }
-
-  private static NamedSpiManager<Sampler> samplerSpiManager(
-      ConfigProperties config, SpiHelper spiHelper) {
-    return spiHelper.loadConfigurable(
-        ConfigurableSamplerProvider.class,
-        ConfigurableSamplerProvider::getName,
-        ConfigurableSamplerProvider::createSampler,
-        config);
   }
 }
diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SdkConfigProvider.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SdkConfigProvider.java
new file mode 100644
index 00000000000..8c8d549e007
--- /dev/null
+++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SdkConfigProvider.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.sdk.extension.incubator.fileconfig;
+
+import io.opentelemetry.api.incubator.config.ConfigProvider;
+import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
+import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel;
+import javax.annotation.Nullable;
+
+/** SDK implementation of {@link ConfigProvider}. */
+public final class SdkConfigProvider implements ConfigProvider {
+
+  @Nullable private final DeclarativeConfigProperties instrumentationConfig;
+
+  private SdkConfigProvider(OpenTelemetryConfigurationModel model) {
+    DeclarativeConfigProperties configProperties =
+        DeclarativeConfiguration.toConfigProperties(model);
+    this.instrumentationConfig = configProperties.getStructured("instrumentation");
+  }
+
+  /**
+   * Create a {@link SdkConfigProvider} from the {@code model}.
+ * + * @param model the configuration model + * @return the {@link SdkConfigProvider} + */ + public static SdkConfigProvider create(OpenTelemetryConfigurationModel model) { + return new SdkConfigProvider(model); + } + + @Nullable + @Override + public DeclarativeConfigProperties getInstrumentationConfig() { + return instrumentationConfig; + } +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanExporterFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanExporterFactory.java index 8a3b8cc6dce..a984c109033 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanExporterFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanExporterFactory.java @@ -7,26 +7,17 @@ import static java.util.stream.Collectors.joining; -import io.opentelemetry.sdk.autoconfigure.internal.NamedSpiManager; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Zipkin; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ZipkinModel; import io.opentelemetry.sdk.trace.export.SpanExporter; import java.io.Closeable; -import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; -import javax.annotation.Nullable; -final class SpanExporterFactory - implements Factory< - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporter, - SpanExporter> { +final class SpanExporterFactory implements Factory { private static final SpanExporterFactory INSTANCE = new SpanExporterFactory(); @@ -38,111 +29,43 @@ static SpanExporterFactory getInstance() { @Override public SpanExporter create( - @Nullable - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporter model, - SpiHelper spiHelper, - List closeables) { - if (model == null) { - return SpanExporter.composite(); - } - - Otlp otlpModel = model.getOtlp(); + SpanExporterModel model, SpiHelper spiHelper, List closeables) { + OtlpModel otlpModel = model.getOtlp(); if (otlpModel != null) { - return FileConfigUtil.addAndReturn(closeables, createOtlpExporter(otlpModel, spiHelper)); + model.getAdditionalProperties().put("otlp", otlpModel); } if (model.getConsole() != null) { - return FileConfigUtil.addAndReturn(closeables, createConsoleExporter(spiHelper)); + model.getAdditionalProperties().put("console", model.getConsole()); } - Zipkin zipkinModel = model.getZipkin(); + ZipkinModel zipkinModel = model.getZipkin(); if (zipkinModel != null) { - return FileConfigUtil.addAndReturn(closeables, createZipkinExporter(zipkinModel, spiHelper)); + model.getAdditionalProperties().put("zipkin", model.getZipkin()); } - // TODO(jack-berg): add support for generic SPI 
exporters if (!model.getAdditionalProperties().isEmpty()) { - throw new ConfigurationException( - "Unrecognized span exporter(s): " - + model.getAdditionalProperties().keySet().stream().collect(joining(",", "[", "]"))); - } - - return SpanExporter.composite(); - } - - private static SpanExporter createOtlpExporter(Otlp model, SpiHelper spiHelper) { - // Translate from file configuration scheme to environment variable scheme. This is ultimately - // interpreted by Otlp*ExporterProviders, but we want to avoid the dependency on - // opentelemetry-exporter-otlp - Map properties = new HashMap<>(); - if (model.getProtocol() != null) { - properties.put("otel.exporter.otlp.traces.protocol", model.getProtocol()); - } - if (model.getEndpoint() != null) { - // NOTE: Set general otel.exporter.otlp.endpoint instead of signal specific - // otel.exporter.otlp.traces.endpoint to allow signal path (i.e. /v1/traces) to be added if - // not present - properties.put("otel.exporter.otlp.endpoint", model.getEndpoint()); - } - if (model.getHeaders() != null) { - properties.put( - "otel.exporter.otlp.traces.headers", - model.getHeaders().getAdditionalProperties().entrySet().stream() - .map(entry -> entry.getKey() + "=" + entry.getValue()) - .collect(joining(","))); - } - if (model.getCompression() != null) { - properties.put("otel.exporter.otlp.traces.compression", model.getCompression()); - } - if (model.getTimeout() != null) { - properties.put("otel.exporter.otlp.traces.timeout", Integer.toString(model.getTimeout())); - } - if (model.getCertificate() != null) { - properties.put("otel.exporter.otlp.traces.certificate", model.getCertificate()); - } - if (model.getClientKey() != null) { - properties.put("otel.exporter.otlp.traces.client.key", model.getClientKey()); + Map additionalProperties = model.getAdditionalProperties(); + if (additionalProperties.size() > 1) { + throw new DeclarativeConfigException( + "Invalid configuration - multiple span exporters set: " + + additionalProperties.keySet().stream().collect(joining(",", "[", "]"))); + } + Map.Entry exporterKeyValue = + additionalProperties.entrySet().stream() + .findFirst() + .orElseThrow( + () -> + new IllegalStateException("Missing exporter. This is a programming error.")); + SpanExporter spanExporter = + FileConfigUtil.loadComponent( + spiHelper, + SpanExporter.class, + exporterKeyValue.getKey(), + exporterKeyValue.getValue()); + return FileConfigUtil.addAndReturn(closeables, spanExporter); + } else { + throw new DeclarativeConfigException("span exporter must be set"); } - if (model.getClientCertificate() != null) { - properties.put("otel.exporter.otlp.traces.client.certificate", model.getClientCertificate()); - } - - ConfigProperties configProperties = DefaultConfigProperties.createFromMap(properties); - return FileConfigUtil.assertNotNull( - spanExporterSpiManager(configProperties, spiHelper).getByName("otlp"), "otlp exporter"); - } - - private static SpanExporter createConsoleExporter(SpiHelper spiHelper) { - return FileConfigUtil.assertNotNull( - spanExporterSpiManager( - DefaultConfigProperties.createFromMap(Collections.emptyMap()), spiHelper) - .getByName("logging"), - "logging exporter"); - } - - private static SpanExporter createZipkinExporter(Zipkin model, SpiHelper spiHelper) { - // Translate from file configuration scheme to environment variable scheme. 
This is ultimately - // interpreted by ZipkinSpanExporterProvider, but we want to avoid the dependency on - // opentelemetry-exporter-zipkin - Map properties = new HashMap<>(); - if (model.getEndpoint() != null) { - properties.put("otel.exporter.zipkin.endpoint", model.getEndpoint()); - } - if (model.getTimeout() != null) { - properties.put("otel.exporter.zipkin.timeout", Integer.toString(model.getTimeout())); - } - - ConfigProperties configProperties = DefaultConfigProperties.createFromMap(properties); - return FileConfigUtil.assertNotNull( - spanExporterSpiManager(configProperties, spiHelper).getByName("zipkin"), "zipkin exporter"); - } - - private static NamedSpiManager spanExporterSpiManager( - ConfigProperties config, SpiHelper spiHelper) { - return spiHelper.loadConfigurable( - ConfigurableSpanExporterProvider.class, - ConfigurableSpanExporterProvider::getName, - ConfigurableSpanExporterProvider::createExporter, - config); } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsAndAttributeLimits.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsAndAttributeLimits.java index 26049538a58..dad8dab1019 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsAndAttributeLimits.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsAndAttributeLimits.java @@ -6,21 +6,21 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import com.google.auto.value.AutoValue; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanLimits; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanLimitsModel; import javax.annotation.Nullable; @AutoValue abstract class SpanLimitsAndAttributeLimits { static SpanLimitsAndAttributeLimits create( - @Nullable AttributeLimits attributeLimits, @Nullable SpanLimits spanLimits) { + @Nullable AttributeLimitsModel attributeLimits, @Nullable SpanLimitsModel spanLimits) { return new AutoValue_SpanLimitsAndAttributeLimits(attributeLimits, spanLimits); } @Nullable - abstract AttributeLimits getAttributeLimits(); + abstract AttributeLimitsModel getAttributeLimits(); @Nullable - abstract SpanLimits getSpanLimits(); + abstract SpanLimitsModel getSpanLimits(); } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsFactory.java index ef03ad70ca8..c10c4bfd7a8 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsFactory.java @@ -6,15 +6,14 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanLimits; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanLimitsModel; +import io.opentelemetry.sdk.trace.SpanLimits; import io.opentelemetry.sdk.trace.SpanLimitsBuilder; import java.io.Closeable; import java.util.List; -import javax.annotation.Nullable; -final class SpanLimitsFactory - implements Factory { +final class SpanLimitsFactory implements Factory { private static final SpanLimitsFactory INSTANCE = new SpanLimitsFactory(); @@ -25,17 +24,11 @@ static SpanLimitsFactory getInstance() { } @Override - public io.opentelemetry.sdk.trace.SpanLimits create( - @Nullable SpanLimitsAndAttributeLimits model, - SpiHelper spiHelper, - List closeables) { - if (model == null) { - return io.opentelemetry.sdk.trace.SpanLimits.getDefault(); - } - - SpanLimitsBuilder builder = io.opentelemetry.sdk.trace.SpanLimits.builder(); + public SpanLimits create( + SpanLimitsAndAttributeLimits model, SpiHelper spiHelper, List closeables) { + SpanLimitsBuilder builder = SpanLimits.builder(); - AttributeLimits attributeLimitsModel = model.getAttributeLimits(); + AttributeLimitsModel attributeLimitsModel = model.getAttributeLimits(); if (attributeLimitsModel != null) { if (attributeLimitsModel.getAttributeCountLimit() != null) { builder.setMaxNumberOfAttributes(attributeLimitsModel.getAttributeCountLimit()); @@ -45,7 +38,7 @@ public io.opentelemetry.sdk.trace.SpanLimits create( } } - SpanLimits spanLimitsModel = model.getSpanLimits(); + SpanLimitsModel spanLimitsModel = model.getSpanLimits(); if (spanLimitsModel != null) { if (spanLimitsModel.getAttributeCountLimit() != null) { builder.setMaxNumberOfAttributes(spanLimitsModel.getAttributeCountLimit()); diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanProcessorFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanProcessorFactory.java index 59b78370ae6..f663f3e97a4 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanProcessorFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanProcessorFactory.java @@ -7,22 +7,23 @@ import static java.util.stream.Collectors.joining; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporter; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleSpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessorModel; import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; import io.opentelemetry.sdk.trace.export.BatchSpanProcessorBuilder; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.export.SpanExporter; import java.io.Closeable; import java.time.Duration; import java.util.List; -import javax.annotation.Nullable; +import java.util.Map; -final class SpanProcessorFactory - implements Factory< - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessor, - SpanProcessor> { +final 
class SpanProcessorFactory implements Factory { private static final SpanProcessorFactory INSTANCE = new SpanProcessorFactory(); @@ -34,25 +35,14 @@ static SpanProcessorFactory getInstance() { @Override public SpanProcessor create( - @Nullable - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessor model, - SpiHelper spiHelper, - List closeables) { - if (model == null) { - return SpanProcessor.composite(); - } - - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessor - batchModel = model.getBatch(); + SpanProcessorModel model, SpiHelper spiHelper, List closeables) { + BatchSpanProcessorModel batchModel = model.getBatch(); if (batchModel != null) { - SpanExporter exporterModel = batchModel.getExporter(); - if (exporterModel == null) { - return SpanProcessor.composite(); - } - - BatchSpanProcessorBuilder builder = - BatchSpanProcessor.builder( - SpanExporterFactory.getInstance().create(exporterModel, spiHelper, closeables)); + SpanExporterModel exporterModel = + FileConfigUtil.requireNonNull(batchModel.getExporter(), "batch span processor exporter"); + SpanExporter spanExporter = + SpanExporterFactory.getInstance().create(exporterModel, spiHelper, closeables); + BatchSpanProcessorBuilder builder = BatchSpanProcessor.builder(spanExporter); if (batchModel.getExportTimeout() != null) { builder.setExporterTimeout(Duration.ofMillis(batchModel.getExportTimeout())); } @@ -68,27 +58,38 @@ public SpanProcessor create( return FileConfigUtil.addAndReturn(closeables, builder.build()); } - io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleSpanProcessor - simpleModel = model.getSimple(); + SimpleSpanProcessorModel simpleModel = model.getSimple(); if (simpleModel != null) { - SpanExporter exporterModel = simpleModel.getExporter(); - if (exporterModel == null) { - return SpanProcessor.composite(); - } - - return FileConfigUtil.addAndReturn( - closeables, - SimpleSpanProcessor.create( - SpanExporterFactory.getInstance().create(exporterModel, spiHelper, closeables))); + SpanExporterModel exporterModel = + FileConfigUtil.requireNonNull( + simpleModel.getExporter(), "simple span processor exporter"); + SpanExporter spanExporter = + SpanExporterFactory.getInstance().create(exporterModel, spiHelper, closeables); + return FileConfigUtil.addAndReturn(closeables, SimpleSpanProcessor.create(spanExporter)); } - // TODO: add support for generic span processors if (!model.getAdditionalProperties().isEmpty()) { - throw new ConfigurationException( - "Unrecognized span processor(s): " - + model.getAdditionalProperties().keySet().stream().collect(joining(",", "[", "]"))); + Map additionalProperties = model.getAdditionalProperties(); + if (additionalProperties.size() > 1) { + throw new DeclarativeConfigException( + "Invalid configuration - multiple span processors set: " + + additionalProperties.keySet().stream().collect(joining(",", "[", "]"))); + } + Map.Entry processorKeyValue = + additionalProperties.entrySet().stream() + .findFirst() + .orElseThrow( + () -> + new IllegalStateException("Missing processor. 
This is a programming error.")); + SpanProcessor spanProcessor = + FileConfigUtil.loadComponent( + spiHelper, + SpanProcessor.class, + processorKeyValue.getKey(), + processorKeyValue.getValue()); + return FileConfigUtil.addAndReturn(closeables, spanProcessor); + } else { + throw new DeclarativeConfigException("span processor must be set"); } - - return SpanProcessor.composite(); } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TextMapPropagatorFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TextMapPropagatorFactory.java new file mode 100644 index 00000000000..798a4b337ec --- /dev/null +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TextMapPropagatorFactory.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import java.io.Closeable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +final class TextMapPropagatorFactory implements Factory, TextMapPropagator> { + + private static final TextMapPropagatorFactory INSTANCE = new TextMapPropagatorFactory(); + + private TextMapPropagatorFactory() {} + + static TextMapPropagatorFactory getInstance() { + return INSTANCE; + } + + @Override + public TextMapPropagator create( + List model, SpiHelper spiHelper, List closeables) { + if (model.isEmpty()) { + model = Arrays.asList("tracecontext", "baggage"); + } + + if (model.contains("none")) { + if (model.size() > 1) { + throw new DeclarativeConfigException( + "propagators contains \"none\" along with other propagators"); + } + return TextMapPropagator.noop(); + } + + List propagators = new ArrayList<>(); + for (String propagator : model) { + propagators.add(getPropagator(spiHelper, propagator)); + } + + return TextMapPropagator.composite(propagators); + } + + private static TextMapPropagator getPropagator(SpiHelper spiHelper, String name) { + if (name.equals("tracecontext")) { + return W3CTraceContextPropagator.getInstance(); + } + if (name.equals("baggage")) { + return W3CBaggagePropagator.getInstance(); + } + + return FileConfigUtil.loadComponent( + spiHelper, TextMapPropagator.class, name, Collections.emptyMap()); + } +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderAndAttributeLimits.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderAndAttributeLimits.java index afc1a41a66e..bcc73943b63 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderAndAttributeLimits.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderAndAttributeLimits.java @@ -6,21 +6,22 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import com.google.auto.value.AutoValue; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProviderModel; import javax.annotation.Nullable; @AutoValue abstract class TracerProviderAndAttributeLimits { static TracerProviderAndAttributeLimits create( - @Nullable AttributeLimits attributeLimits, @Nullable TracerProvider tracerProvider) { + @Nullable AttributeLimitsModel attributeLimits, + @Nullable TracerProviderModel tracerProvider) { return new AutoValue_TracerProviderAndAttributeLimits(attributeLimits, tracerProvider); } @Nullable - abstract AttributeLimits getAttributeLimits(); + abstract AttributeLimitsModel getAttributeLimits(); @Nullable - abstract TracerProvider getTracerProvider(); + abstract TracerProviderModel getTracerProvider(); } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderFactory.java index e5940749b55..8634710b2c6 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderFactory.java @@ -6,15 +6,14 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProviderModel; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder; import io.opentelemetry.sdk.trace.SpanLimits; import io.opentelemetry.sdk.trace.samplers.Sampler; import java.io.Closeable; import java.util.List; -import javax.annotation.Nullable; final class TracerProviderFactory implements Factory { @@ -29,14 +28,9 @@ static TracerProviderFactory getInstance() { @Override public SdkTracerProviderBuilder create( - @Nullable TracerProviderAndAttributeLimits model, - SpiHelper spiHelper, - List closeables) { + TracerProviderAndAttributeLimits model, SpiHelper spiHelper, List closeables) { SdkTracerProviderBuilder builder = SdkTracerProvider.builder(); - if (model == null) { - return builder; - } - TracerProvider tracerProviderModel = model.getTracerProvider(); + TracerProviderModel tracerProviderModel = model.getTracerProvider(); if (tracerProviderModel == null) { return builder; } @@ -50,12 +44,14 @@ public SdkTracerProviderBuilder create( closeables); builder.setSpanLimits(spanLimits); - Sampler sampler = - SamplerFactory.getInstance() - .create(tracerProviderModel.getSampler(), spiHelper, closeables); - builder.setSampler(sampler); + if (tracerProviderModel.getSampler() != null) { + Sampler sampler = + SamplerFactory.getInstance() + .create(tracerProviderModel.getSampler(), spiHelper, closeables); + builder.setSampler(sampler); + } - List processors = tracerProviderModel.getProcessors(); + List processors = tracerProviderModel.getProcessors(); if (processors != null) { processors.forEach( processor -> diff 
--git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ViewFactory.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ViewFactory.java index 77a66833761..38ad79d7669 100644 --- a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ViewFactory.java +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ViewFactory.java @@ -6,16 +6,17 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Stream; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.IncludeExcludeModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.StreamModel; import io.opentelemetry.sdk.metrics.View; import io.opentelemetry.sdk.metrics.ViewBuilder; import java.io.Closeable; import java.util.HashSet; import java.util.List; +import java.util.Set; import javax.annotation.Nullable; -final class ViewFactory implements Factory { +final class ViewFactory implements Factory { private static final ViewFactory INSTANCE = new ViewFactory(); @@ -26,11 +27,7 @@ static ViewFactory getInstance() { } @Override - public View create(@Nullable Stream model, SpiHelper spiHelper, List closeables) { - if (model == null) { - throw new ConfigurationException("stream must not be null"); - } - + public View create(StreamModel model, SpiHelper spiHelper, List closeables) { ViewBuilder builder = View.builder(); if (model.getName() != null) { builder.setName(model.getName()); @@ -38,8 +35,9 @@ public View create(@Nullable Stream model, SpiHelper spiHelper, List if (model.getDescription() != null) { builder.setDescription(model.getDescription()); } - if (model.getAttributeKeys() != null) { - builder.setAttributeFilter(new HashSet<>(model.getAttributeKeys())); + IncludeExcludeModel attributeKeys = model.getAttributeKeys(); + if (attributeKeys != null) { + addAttributeKeyFilter(builder, attributeKeys.getIncluded(), attributeKeys.getExcluded()); } if (model.getAggregation() != null) { builder.setAggregation( @@ -47,4 +45,25 @@ public View create(@Nullable Stream model, SpiHelper spiHelper, List } return builder.build(); } + + private static void addAttributeKeyFilter( + ViewBuilder builder, @Nullable List included, @Nullable List excluded) { + if (included == null && excluded == null) { + return; + } + if (included == null) { + Set excludedKeys = new HashSet<>(excluded); + // TODO: set predicate with useful toString implementation + builder.setAttributeFilter(attributeKey -> !excludedKeys.contains(attributeKey)); + return; + } + if (excluded == null) { + Set includedKeys = new HashSet<>(included); + builder.setAttributeFilter(includedKeys); + return; + } + Set includedKeys = new HashSet<>(included); + excluded.forEach(includedKeys::remove); + builder.setAttributeFilter(includedKeys); + } } diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/YamlDeclarativeConfigProperties.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/YamlDeclarativeConfigProperties.java new file mode 100644 index 00000000000..8a4a70704fb --- /dev/null +++ 
b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/fileconfig/YamlDeclarativeConfigProperties.java @@ -0,0 +1,314 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toList; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.internal.ComponentLoader; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.StringJoiner; +import javax.annotation.Nullable; + +/** + * Implementation of {@link DeclarativeConfigProperties} which uses a file configuration model as a + * source. + * + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + * @see #getStructured(String) Accessing nested maps + * @see #getStructuredList(String) Accessing lists of maps + * @see DeclarativeConfiguration#toConfigProperties(Object, ComponentLoader) Converting + * configuration model to properties + */ +public final class YamlDeclarativeConfigProperties implements DeclarativeConfigProperties { + + private static final Set> SUPPORTED_SCALAR_TYPES = + Collections.unmodifiableSet( + new LinkedHashSet<>( + Arrays.asList(String.class, Boolean.class, Long.class, Double.class))); + + /** Values are {@link #isPrimitive(Object)}, {@link List} of scalars. */ + private final Map simpleEntries; + + private final Map> listEntries; + private final Map mapEntries; + private final ComponentLoader componentLoader; + + private YamlDeclarativeConfigProperties( + Map simpleEntries, + Map> listEntries, + Map mapEntries, + ComponentLoader componentLoader) { + this.simpleEntries = simpleEntries; + this.listEntries = listEntries; + this.mapEntries = mapEntries; + this.componentLoader = componentLoader; + } + + /** + * Create a {@link YamlDeclarativeConfigProperties} from the {@code properties} map. + * + *
<p>
{@code properties} is expected to be the output of YAML parsing (i.e. with Jackson {@code + * com.fasterxml.jackson.databind.ObjectMapper}), and have values which are scalars, lists of + * scalars, lists of maps, and maps. + * + * @see DeclarativeConfiguration#toConfigProperties(OpenTelemetryConfigurationModel) + */ + @SuppressWarnings("unchecked") + static YamlDeclarativeConfigProperties create( + Map properties, ComponentLoader componentLoader) { + Map simpleEntries = new LinkedHashMap<>(); + Map> listEntries = new LinkedHashMap<>(); + Map mapEntries = new LinkedHashMap<>(); + for (Map.Entry entry : properties.entrySet()) { + String key = entry.getKey(); + Object value = entry.getValue(); + if (isPrimitive(value) || value == null) { + simpleEntries.put(key, value); + continue; + } + if (isPrimitiveList(value)) { + simpleEntries.put(key, value); + continue; + } + if (isListOfMaps(value)) { + List list = + ((List>) value) + .stream() + .map(map -> YamlDeclarativeConfigProperties.create(map, componentLoader)) + .collect(toList()); + listEntries.put(key, list); + continue; + } + if (isMap(value)) { + YamlDeclarativeConfigProperties configProperties = + YamlDeclarativeConfigProperties.create((Map) value, componentLoader); + mapEntries.put(key, configProperties); + continue; + } + throw new DeclarativeConfigException( + "Unable to initialize ExtendedConfigProperties. Key \"" + + key + + "\" has unrecognized object type " + + value.getClass().getName()); + } + return new YamlDeclarativeConfigProperties( + simpleEntries, listEntries, mapEntries, componentLoader); + } + + private static boolean isPrimitiveList(Object object) { + if (object instanceof List) { + List list = (List) object; + return list.stream().allMatch(YamlDeclarativeConfigProperties::isPrimitive); + } + return false; + } + + private static boolean isPrimitive(Object object) { + return object instanceof String + || object instanceof Integer + || object instanceof Long + || object instanceof Float + || object instanceof Double + || object instanceof Boolean; + } + + private static boolean isListOfMaps(Object object) { + if (object instanceof List) { + List list = (List) object; + return list.stream() + .allMatch( + entry -> + entry instanceof Map + && ((Map) entry) + .keySet().stream().allMatch(key -> key instanceof String)); + } + return false; + } + + private static boolean isMap(Object object) { + if (object instanceof Map) { + Map map = (Map) object; + return map.keySet().stream().allMatch(entry -> entry instanceof String); + } + return false; + } + + @Nullable + @Override + public String getString(String name) { + return stringOrNull(simpleEntries.get(name)); + } + + @Nullable + @Override + public Boolean getBoolean(String name) { + return booleanOrNull(simpleEntries.get(name)); + } + + @Nullable + @Override + public Integer getInt(String name) { + Object value = simpleEntries.get(name); + if (value instanceof Integer) { + return (Integer) value; + } + if (value instanceof Long) { + return ((Long) value).intValue(); + } + return null; + } + + @Nullable + @Override + public Long getLong(String name) { + return longOrNull(simpleEntries.get(name)); + } + + @Nullable + @Override + public Double getDouble(String name) { + return doubleOrNull(simpleEntries.get(name)); + } + + @Nullable + @Override + @SuppressWarnings("unchecked") + public List getScalarList(String name, Class scalarType) { + if (!SUPPORTED_SCALAR_TYPES.contains(scalarType)) { + throw new DeclarativeConfigException( + "Unsupported scalar type " + + 
scalarType.getName() + + ". Supported types include " + + SUPPORTED_SCALAR_TYPES.stream() + .map(Class::getName) + .collect(joining(",", "[", "]"))); + } + Object value = simpleEntries.get(name); + if (value instanceof List) { + return (List) + ((List) value) + .stream() + .map( + entry -> { + if (scalarType == String.class) { + return stringOrNull(entry); + } else if (scalarType == Boolean.class) { + return booleanOrNull(entry); + } else if (scalarType == Long.class) { + return longOrNull(entry); + } else if (scalarType == Double.class) { + return doubleOrNull(entry); + } + return null; + }) + .filter(Objects::nonNull) + .collect(toList()); + } + return null; + } + + @Nullable + private static String stringOrNull(@Nullable Object value) { + if (value instanceof String) { + return (String) value; + } + return null; + } + + @Nullable + private static Boolean booleanOrNull(@Nullable Object value) { + if (value instanceof Boolean) { + return (Boolean) value; + } + return null; + } + + @Nullable + private static Long longOrNull(@Nullable Object value) { + if (value instanceof Integer) { + return ((Integer) value).longValue(); + } + if (value instanceof Long) { + return (Long) value; + } + return null; + } + + @Nullable + private static Double doubleOrNull(@Nullable Object value) { + if (value instanceof Float) { + return ((Float) value).doubleValue(); + } + if (value instanceof Double) { + return (Double) value; + } + return null; + } + + @Nullable + @Override + public DeclarativeConfigProperties getStructured(String name) { + return mapEntries.get(name); + } + + @Nullable + @Override + public List getStructuredList(String name) { + List value = listEntries.get(name); + if (value != null) { + return Collections.unmodifiableList(value); + } + return null; + } + + @Override + public Set getPropertyKeys() { + Set keys = new LinkedHashSet<>(); + keys.addAll(simpleEntries.keySet()); + keys.addAll(listEntries.keySet()); + keys.addAll(mapEntries.keySet()); + return Collections.unmodifiableSet(keys); + } + + @Override + public String toString() { + StringJoiner joiner = new StringJoiner(", ", "YamlDeclarativeConfigProperties{", "}"); + simpleEntries.forEach((key, value) -> joiner.add(key + "=" + value)); + listEntries.forEach((key, value) -> joiner.add(key + "=" + value)); + mapEntries.forEach((key, value) -> joiner.add(key + "=" + value)); + return joiner.toString(); + } + + /** Return a map representation of the data. */ + public Map toMap() { + Map result = new HashMap<>(simpleEntries); + listEntries.forEach( + (key, value) -> + result.put( + key, value.stream().map(YamlDeclarativeConfigProperties::toMap).collect(toList()))); + mapEntries.forEach((key, value) -> result.put(key, value.toMap())); + return Collections.unmodifiableMap(result); + } + + /** Return the {@link ComponentLoader}. 
*/ + public ComponentLoader getComponentLoader() { + return componentLoader; + } +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/resources/ServiceInstanceIdResourceProvider.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/resources/ServiceInstanceIdResourceProvider.java new file mode 100644 index 00000000000..97e84f3c686 --- /dev/null +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/resources/ServiceInstanceIdResourceProvider.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.resources; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ConditionalResourceProvider; +import io.opentelemetry.sdk.resources.Resource; +import java.util.UUID; + +/** + * does not implement {@link ResourceProvider}, because it depends on all attributes discovered by + * the other providers. + */ +public final class ServiceInstanceIdResourceProvider implements ConditionalResourceProvider { + + public static final AttributeKey SERVICE_INSTANCE_ID = + AttributeKey.stringKey("service.instance.id"); + + // multiple calls to this resource provider should return the same value + private static final Resource RANDOM = + Resource.create(Attributes.of(SERVICE_INSTANCE_ID, UUID.randomUUID().toString())); + + static final int ORDER = Integer.MAX_VALUE; + + @Override + public Resource createResource(ConfigProperties config) { + return RANDOM; + } + + @Override + public boolean shouldApply(ConfigProperties config, Resource existing) { + return existing.getAttribute(SERVICE_INSTANCE_ID) == null; + } + + @Override + public int order() { + // Run after environment resource provider - only set the service instance ID if it + // hasn't been set by any other provider or the user. + return ORDER; + } +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/trace/OnEndSpanProcessor.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/trace/OnEndSpanProcessor.java new file mode 100644 index 00000000000..4a207277201 --- /dev/null +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/trace/OnEndSpanProcessor.java @@ -0,0 +1,49 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.trace; + +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.trace.SpanProcessor; + +/** A SpanProcessor implementation that is only capable of processing spans when they end. 
*/ +public final class OnEndSpanProcessor implements SpanProcessor { + private final OnEnd onEnd; + + private OnEndSpanProcessor(OnEnd onEnd) { + this.onEnd = onEnd; + } + + static SpanProcessor create(OnEnd onEnd) { + return new OnEndSpanProcessor(onEnd); + } + + @Override + public void onEnd(ReadableSpan span) { + onEnd.apply(span); + } + + @Override + public boolean isEndRequired() { + return true; + } + + @Override + public void onStart(Context parentContext, ReadWriteSpan span) { + // nop + } + + @Override + public boolean isStartRequired() { + return false; + } + + @FunctionalInterface + public interface OnEnd { + void apply(ReadableSpan span); + } +} diff --git a/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/trace/OnStartSpanProcessor.java b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/trace/OnStartSpanProcessor.java new file mode 100644 index 00000000000..1ef96c2e883 --- /dev/null +++ b/sdk-extensions/incubator/src/main/java/io/opentelemetry/sdk/extension/incubator/trace/OnStartSpanProcessor.java @@ -0,0 +1,50 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.trace; + +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.trace.SpanProcessor; + +/** A SpanProcessor that only handles onStart(). */ +public final class OnStartSpanProcessor implements SpanProcessor { + + private final OnStart onStart; + + private OnStartSpanProcessor(OnStart onStart) { + this.onStart = onStart; + } + + public static SpanProcessor create(OnStart onStart) { + return new OnStartSpanProcessor(onStart); + } + + @Override + public void onStart(Context parentContext, ReadWriteSpan span) { + onStart.apply(parentContext, span); + } + + @Override + public boolean isStartRequired() { + return true; + } + + @Override + public void onEnd(ReadableSpan span) { + // nop + } + + @Override + public boolean isEndRequired() { + return false; + } + + @FunctionalInterface + public interface OnStart { + void apply(Context context, ReadWriteSpan span); + } +} diff --git a/sdk-extensions/incubator/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider b/sdk-extensions/incubator/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider new file mode 100644 index 00000000000..189af738dcf --- /dev/null +++ b/sdk-extensions/incubator/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider @@ -0,0 +1 @@ +io.opentelemetry.sdk.extension.incubator.resources.ServiceInstanceIdResourceProvider diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AggregationFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AggregationFactoryTest.java index 3b37e2077ad..0efe2691b0a 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AggregationFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AggregationFactoryTest.java @@ -9,32 +9,24 @@ import static org.mockito.Mockito.mock; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Aggregation; -import 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Base2ExponentialBucketHistogram; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExplicitBucketHistogram; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AggregationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Base2ExponentialBucketHistogramModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.DropModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExplicitBucketHistogramModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LastValueModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SumModel; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; class AggregationFactoryTest { - @Test - void create_Null() { - assertThat( - AggregationFactory.getInstance() - .create(null, mock(SpiHelper.class), Collections.emptyList()) - .toString()) - .isEqualTo(io.opentelemetry.sdk.metrics.Aggregation.defaultAggregation().toString()); - } - @ParameterizedTest @MethodSource("createTestCases") - void create(Aggregation model, io.opentelemetry.sdk.metrics.Aggregation expectedResult) { + void create(AggregationModel model, io.opentelemetry.sdk.metrics.Aggregation expectedResult) { io.opentelemetry.sdk.metrics.Aggregation aggregation = AggregationFactory.getInstance().create(model, mock(SpiHelper.class), new ArrayList<>()); assertThat(aggregation.toString()).isEqualTo(expectedResult.toString()); @@ -43,33 +35,34 @@ void create(Aggregation model, io.opentelemetry.sdk.metrics.Aggregation expected private static Stream createTestCases() { return Stream.of( Arguments.of( - new Aggregation(), io.opentelemetry.sdk.metrics.Aggregation.defaultAggregation()), + new AggregationModel(), io.opentelemetry.sdk.metrics.Aggregation.defaultAggregation()), Arguments.of( - new Aggregation().withDrop(new Object()), + new AggregationModel().withDrop(new DropModel()), io.opentelemetry.sdk.metrics.Aggregation.drop()), Arguments.of( - new Aggregation().withSum(new Object()), + new AggregationModel().withSum(new SumModel()), io.opentelemetry.sdk.metrics.Aggregation.sum()), Arguments.of( - new Aggregation().withLastValue(new Object()), + new AggregationModel().withLastValue(new LastValueModel()), io.opentelemetry.sdk.metrics.Aggregation.lastValue()), Arguments.of( - new Aggregation() - .withBase2ExponentialBucketHistogram(new Base2ExponentialBucketHistogram()), + new AggregationModel() + .withBase2ExponentialBucketHistogram(new Base2ExponentialBucketHistogramModel()), io.opentelemetry.sdk.metrics.Aggregation.base2ExponentialBucketHistogram()), Arguments.of( - new Aggregation() + new AggregationModel() .withBase2ExponentialBucketHistogram( - new Base2ExponentialBucketHistogram().withMaxSize(1).withMaxScale(2)), - io.opentelemetry.sdk.metrics.Aggregation.base2ExponentialBucketHistogram(1, 2)), + new Base2ExponentialBucketHistogramModel().withMaxSize(2).withMaxScale(2)), + io.opentelemetry.sdk.metrics.Aggregation.base2ExponentialBucketHistogram(2, 2)), Arguments.of( - new Aggregation() - .withExplicitBucketHistogram(new ExplicitBucketHistogram().withBoundaries(null)), + new AggregationModel() + 
.withExplicitBucketHistogram( + new ExplicitBucketHistogramModel().withBoundaries(null)), io.opentelemetry.sdk.metrics.Aggregation.explicitBucketHistogram()), Arguments.of( - new Aggregation() + new AggregationModel() .withExplicitBucketHistogram( - new ExplicitBucketHistogram().withBoundaries(Arrays.asList(1.0, 2.0))), + new ExplicitBucketHistogramModel().withBoundaries(Arrays.asList(1.0, 2.0))), io.opentelemetry.sdk.metrics.Aggregation.explicitBucketHistogram( Arrays.asList(1.0, 2.0)))); } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributeListFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributeListFactoryTest.java new file mode 100644 index 00000000000..2b3bca05212 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributeListFactoryTest.java @@ -0,0 +1,140 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.mock; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeNameValueModel; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class AttributeListFactoryTest { + + @ParameterizedTest + @MethodSource("invalidAttributes") + void create_InvalidAttributes(List model, String expectedMessage) { + assertThatThrownBy( + () -> + AttributeListFactory.getInstance() + .create(model, mock(SpiHelper.class), Collections.emptyList())) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessageContaining(expectedMessage); + } + + private static Stream invalidAttributes() { + return Stream.of( + Arguments.of( + Collections.singletonList(new AttributeNameValueModel().withName("key")), + "attribute value is required but is null"), + Arguments.of( + Collections.singletonList( + new AttributeNameValueModel().withName("key").withValue(new Object())), + "Error processing attribute with name \"key\": value did not match type STRING"), + Arguments.of( + Collections.singletonList( + new AttributeNameValueModel() + .withName("key") + .withType(AttributeNameValueModel.Type.INT) + .withValue(Arrays.asList(1L, 1))), + "Error processing attribute with name \"key\": value did not match type INT"), + Arguments.of( + Collections.singletonList( + new AttributeNameValueModel() + .withName("key") + .withType(AttributeNameValueModel.Type.INT) + .withValue(true)), + "Error processing attribute with name \"key\": value did not match type INT")); + } + + @Test + void create() { + Attributes expectedAttributes = + Attributes.builder() + .put("service.name", "my-service") + .put("strKey", "val") + .put("longKey", 1L) + .put("intKey", 2) + .put("doubleKey", 1.0d) + .put("floatKey", 2.0f) + .put("boolKey", true) + .put("strArrKey", "val1", "val2") + .put("longArrKey", 1L, 2L) 
+ .put("intArrKey", 1, 2) + .put("doubleArrKey", 1.0d, 2.0d) + .put("floatArrKey", 1.0f, 2.0f) + .put("boolArrKey", true, false) + .build(); + assertThat( + AttributeListFactory.getInstance() + .create( + Arrays.asList( + new AttributeNameValueModel() + .withName("service.name") + .withValue("my-service"), + new AttributeNameValueModel() + .withName("strKey") + .withValue("val") + .withType(AttributeNameValueModel.Type.STRING), + new AttributeNameValueModel() + .withName("longKey") + .withValue(1L) + .withType(AttributeNameValueModel.Type.INT), + new AttributeNameValueModel() + .withName("intKey") + .withValue(2) + .withType(AttributeNameValueModel.Type.INT), + new AttributeNameValueModel() + .withName("doubleKey") + .withValue(1.0d) + .withType(AttributeNameValueModel.Type.DOUBLE), + new AttributeNameValueModel() + .withName("floatKey") + .withValue(2.0f) + .withType(AttributeNameValueModel.Type.DOUBLE), + new AttributeNameValueModel() + .withName("boolKey") + .withValue(true) + .withType(AttributeNameValueModel.Type.BOOL), + new AttributeNameValueModel() + .withName("strArrKey") + .withValue(Arrays.asList("val1", "val2")) + .withType(AttributeNameValueModel.Type.STRING_ARRAY), + new AttributeNameValueModel() + .withName("longArrKey") + .withValue(Arrays.asList(1L, 2L)) + .withType(AttributeNameValueModel.Type.INT_ARRAY), + new AttributeNameValueModel() + .withName("intArrKey") + .withValue(Arrays.asList(1, 2)) + .withType(AttributeNameValueModel.Type.INT_ARRAY), + new AttributeNameValueModel() + .withName("doubleArrKey") + .withValue(Arrays.asList(1.0d, 2.0d)) + .withType(AttributeNameValueModel.Type.DOUBLE_ARRAY), + new AttributeNameValueModel() + .withName("floatArrKey") + .withValue(Arrays.asList(1.0f, 2.0f)) + .withType(AttributeNameValueModel.Type.DOUBLE_ARRAY), + new AttributeNameValueModel() + .withName("boolArrKey") + .withValue(Arrays.asList(true, false)) + .withType(AttributeNameValueModel.Type.BOOL_ARRAY)), + mock(SpiHelper.class), + Collections.emptyList())) + .isEqualTo(expectedAttributes); + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributesFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributesFactoryTest.java deleted file mode 100644 index dc32aa6fe28..00000000000 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/AttributesFactoryTest.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.extension.incubator.fileconfig; - -import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.Mockito.mock; - -import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Attributes; -import java.util.Arrays; -import java.util.Collections; -import java.util.stream.Stream; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -class AttributesFactoryTest { - - @Test - void create_Null() { - assertThat( - AttributesFactory.getInstance() - .create(null, mock(SpiHelper.class), Collections.emptyList())) - 
.isEqualTo(io.opentelemetry.api.common.Attributes.empty()); - } - - @ParameterizedTest - @MethodSource("invalidAttributes") - void create_InvalidAttributes(Attributes model, String expectedMessage) { - assertThatThrownBy( - () -> - AttributesFactory.getInstance() - .create(model, mock(SpiHelper.class), Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) - .hasMessageContaining(expectedMessage); - } - - private static Stream invalidAttributes() { - return Stream.of( - Arguments.of( - new Attributes().withAdditionalProperty("key", null), - "Error processing attribute with key \"key\": unexpected null value"), - Arguments.of( - new Attributes().withAdditionalProperty("key", new Object()), - "Error processing attribute with key \"key\": unrecognized value type java.lang.Object"), - Arguments.of( - new Attributes().withAdditionalProperty("key", Arrays.asList(1L, 1)), - "Error processing attribute with key \"key\": expected value entries to be of type class java.lang.Long but found entry with type class java.lang.Integer"), - Arguments.of( - new Attributes().withAdditionalProperty("key", Arrays.asList(1L, null)), - "Error processing attribute with key \"key\": unexpected null element in value")); - } - - @Test - void create() { - assertThat( - AttributesFactory.getInstance() - .create( - new Attributes() - .withServiceName("my-service") - .withAdditionalProperty("strKey", "val") - .withAdditionalProperty("longKey", 1L) - .withAdditionalProperty("intKey", 2) - .withAdditionalProperty("doubleKey", 1.0d) - .withAdditionalProperty("floatKey", 2.0f) - .withAdditionalProperty("boolKey", true) - .withAdditionalProperty("strArrKey", Arrays.asList("val1", "val2")) - .withAdditionalProperty("longArrKey", Arrays.asList(1L, 2L)) - .withAdditionalProperty("intArrKey", Arrays.asList(1, 2)) - .withAdditionalProperty("doubleArrKey", Arrays.asList(1.0d, 2.0d)) - .withAdditionalProperty("floatArrKey", Arrays.asList(1.0f, 2.0f)) - .withAdditionalProperty("boolArrKey", Arrays.asList(true, false)) - .withAdditionalProperty("emptyArrKey", Collections.emptyList()), - mock(SpiHelper.class), - Collections.emptyList())) - .isEqualTo( - io.opentelemetry.api.common.Attributes.builder() - .put("service.name", "my-service") - .put("strKey", "val") - .put("longKey", 1L) - .put("intKey", 2) - .put("doubleKey", 1.0d) - .put("floatKey", 2.0f) - .put("boolKey", true) - .put("strArrKey", "val1", "val2") - .put("longArrKey", 1L, 2L) - .put("intArrKey", 1, 2) - .put("doubleArrKey", 1.0d, 2.0d) - .put("floatArrKey", 1.0f, 2.0f) - .put("boolArrKey", true, false) - .build()); - } -} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationReaderTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationReaderTest.java deleted file mode 100644 index 9df4b15d25f..00000000000 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationReaderTest.java +++ /dev/null @@ -1,279 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.extension.incubator.fileconfig; - -import static org.assertj.core.api.Assertions.assertThat; - -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Aggregation; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOff; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOn; -import 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Attributes; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Console; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExplicitBucketHistogram; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Headers; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MeterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfiguration; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetric; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetric.DefaultHistogramAggregation; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ParentBased; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Prometheus; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Resource; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Sampler; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Selector; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleSpanProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Stream; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TraceIdRatioBased; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.View; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Zipkin; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import org.junit.jupiter.api.Test; - -class ConfigurationReaderTest { - - @Test - void read_KitchenSinkExampleFile() throws IOException { - OpenTelemetryConfiguration expected = new OpenTelemetryConfiguration(); - - expected.withFileFormat("0.1"); - expected.withDisabled(false); - - // General config - Resource resource = - new 
Resource().withAttributes(new Attributes().withServiceName("unknown_service")); - expected.withResource(resource); - - AttributeLimits attributeLimits = - new AttributeLimits().withAttributeValueLengthLimit(4096).withAttributeCountLimit(128); - expected.withAttributeLimits(attributeLimits); - - List propagators = - Arrays.asList("tracecontext", "baggage", "b3", "b3multi", "jaeger", "xray", "ottrace"); - expected.withPropagators(propagators); - - // TracerProvider config - TracerProvider tracerProvider = new TracerProvider(); - - SpanLimits spanLimits = - new SpanLimits() - .withAttributeValueLengthLimit(4096) - .withAttributeCountLimit(128) - .withEventCountLimit(128) - .withLinkCountLimit(128) - .withEventAttributeCountLimit(128) - .withLinkAttributeCountLimit(128); - tracerProvider.withLimits(spanLimits); - - Sampler sampler = - new Sampler() - .withParentBased( - new ParentBased() - .withRoot( - new Sampler() - .withTraceIdRatioBased(new TraceIdRatioBased().withRatio(0.0001))) - .withRemoteParentSampled(new Sampler().withAlwaysOn(new AlwaysOn())) - .withRemoteParentNotSampled(new Sampler().withAlwaysOff(new AlwaysOff())) - .withLocalParentSampled(new Sampler().withAlwaysOn(new AlwaysOn())) - .withLocalParentNotSampled(new Sampler().withAlwaysOff(new AlwaysOff()))); - tracerProvider.withSampler(sampler); - - SpanProcessor spanProcessor1 = - new SpanProcessor() - .withBatch( - new BatchSpanProcessor() - .withScheduleDelay(5_000) - .withExportTimeout(30_000) - .withMaxQueueSize(2048) - .withMaxExportBatchSize(512) - .withExporter( - new SpanExporter() - .withOtlp( - new Otlp() - .withProtocol("http/protobuf") - .withEndpoint("http://localhost:4318") - .withCertificate("/app/cert.pem") - .withClientKey("/app/cert.pem") - .withClientCertificate("/app/cert.pem") - .withHeaders( - new Headers().withAdditionalProperty("api-key", "1234")) - .withCompression("gzip") - .withTimeout(10_000)))); - SpanProcessor spanProcessor2 = - new SpanProcessor() - .withBatch( - new BatchSpanProcessor() - .withExporter( - new SpanExporter() - .withZipkin( - new Zipkin() - .withEndpoint("http://localhost:9411/api/v2/spans") - .withTimeout(10_000)))); - SpanProcessor spanProcessor3 = - new SpanProcessor() - .withSimple( - new SimpleSpanProcessor() - .withExporter(new SpanExporter().withConsole(new Console()))); - tracerProvider.withProcessors(Arrays.asList(spanProcessor1, spanProcessor2, spanProcessor3)); - - expected.withTracerProvider(tracerProvider); - // end TracerProvider config - - // LoggerProvider config - LoggerProvider loggerProvider = new LoggerProvider(); - - LogRecordLimits logRecordLimits = - new LogRecordLimits().withAttributeValueLengthLimit(4096).withAttributeCountLimit(128); - loggerProvider.withLimits(logRecordLimits); - - LogRecordProcessor logRecordProcessor = - new LogRecordProcessor() - .withBatch( - new BatchLogRecordProcessor() - .withScheduleDelay(5_000) - .withExportTimeout(30_000) - .withMaxQueueSize(2048) - .withMaxExportBatchSize(512) - .withExporter( - new LogRecordExporter() - .withOtlp( - new Otlp() - .withProtocol("http/protobuf") - .withEndpoint("http://localhost:4318") - .withCertificate("/app/cert.pem") - .withClientKey("/app/cert.pem") - .withClientCertificate("/app/cert.pem") - .withHeaders( - new Headers().withAdditionalProperty("api-key", "1234")) - .withCompression("gzip") - .withTimeout(10_000)))); - loggerProvider.withProcessors(Collections.singletonList(logRecordProcessor)); - - expected.withLoggerProvider(loggerProvider); - // end LoggerProvider config - - // 
MeterProvider config - MeterProvider meterProvider = new MeterProvider(); - - MetricReader metricReader1 = - new MetricReader() - .withPull( - new PullMetricReader() - .withExporter( - new MetricExporter() - .withPrometheus( - new Prometheus().withHost("localhost").withPort(9464)))); - MetricReader metricReader2 = - new MetricReader() - .withPeriodic( - new PeriodicMetricReader() - .withInterval(5_000) - .withTimeout(30_000) - .withExporter( - new MetricExporter() - .withOtlp( - new OtlpMetric() - .withProtocol("http/protobuf") - .withEndpoint("http://localhost:4318") - .withCertificate("/app/cert.pem") - .withClientKey("/app/cert.pem") - .withClientCertificate("/app/cert.pem") - .withHeaders( - new Headers().withAdditionalProperty("api-key", "1234")) - .withCompression("gzip") - .withTimeout(10_000) - .withTemporalityPreference("delta") - .withDefaultHistogramAggregation( - DefaultHistogramAggregation - .BASE_2_EXPONENTIAL_BUCKET_HISTOGRAM)))); - MetricReader metricReader3 = - new MetricReader() - .withPeriodic( - new PeriodicMetricReader() - .withExporter(new MetricExporter().withConsole(new Console()))); - meterProvider.withReaders(Arrays.asList(metricReader1, metricReader2, metricReader3)); - - View view = - new View() - .withSelector( - new Selector() - .withInstrumentName("my-instrument") - .withInstrumentType(Selector.InstrumentType.HISTOGRAM) - .withUnit("ms") - .withMeterName("my-meter") - .withMeterVersion("1.0.0") - .withMeterSchemaUrl("https://opentelemetry.io/schemas/1.16.0")) - .withStream( - new Stream() - .withName("new_instrument_name") - .withDescription("new_description") - .withAggregation( - new Aggregation() - .withExplicitBucketHistogram( - new ExplicitBucketHistogram() - .withBoundaries( - Arrays.asList( - 0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0, - 750.0, 1000.0, 2500.0, 5000.0, 7500.0, 10000.0)) - .withRecordMinMax(true))) - .withAttributeKeys(Arrays.asList("key1", "key2"))); - meterProvider.withViews(Collections.singletonList(view)); - - expected.withMeterProvider(meterProvider); - // end MeterProvider config - - try (FileInputStream configExampleFile = - new FileInputStream(System.getenv("CONFIG_EXAMPLE_DIR") + "/kitchen-sink.yaml")) { - OpenTelemetryConfiguration config = ConfigurationReader.parse(configExampleFile); - - // General config - assertThat(config.getFileFormat()).isEqualTo("0.1"); - assertThat(config.getResource()).isEqualTo(resource); - assertThat(config.getAttributeLimits()).isEqualTo(attributeLimits); - assertThat(config.getPropagators()).isEqualTo(propagators); - - // TracerProvider config - TracerProvider configTracerProvider = config.getTracerProvider(); - assertThat(configTracerProvider.getLimits()).isEqualTo(spanLimits); - assertThat(configTracerProvider.getSampler()).isEqualTo(sampler); - assertThat(configTracerProvider.getProcessors()) - .isEqualTo(Arrays.asList(spanProcessor1, spanProcessor2, spanProcessor3)); - - // LoggerProvider config - LoggerProvider configLoggerProvider = config.getLoggerProvider(); - assertThat(configLoggerProvider.getLimits()).isEqualTo(logRecordLimits); - assertThat(configLoggerProvider.getProcessors()) - .isEqualTo(Collections.singletonList(logRecordProcessor)); - - // MeterProvider config - MeterProvider configMeterProvider = config.getMeterProvider(); - assertThat(configMeterProvider.getReaders()) - .isEqualTo(Arrays.asList(metricReader1, metricReader2, metricReader3)); - assertThat(configMeterProvider.getViews()).isEqualTo(Collections.singletonList(view)); - - // All configuration - 
assertThat(config).isEqualTo(expected); - } - } -} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationCreateTest.java similarity index 55% rename from sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationFactoryTest.java rename to sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationCreateTest.java index 2054e967987..d512ee9f6d3 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ConfigurationFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationCreateTest.java @@ -12,8 +12,13 @@ import com.linecorp.armeria.testing.junit5.server.SelfSignedCertificateExtension; import io.github.netmikey.logunit.api.LogCapturer; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.internal.testing.CleanupExtension; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProviderModel; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; @@ -22,13 +27,14 @@ import java.nio.file.Files; import java.nio.file.Path; import java.security.cert.CertificateEncodingException; +import java.util.Collections; import java.util.Objects; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.api.io.TempDir; import org.slf4j.event.Level; -class ConfigurationFactoryTest { +class DeclarativeConfigurationCreateTest { @RegisterExtension static final SelfSignedCertificateExtension serverTls = new SelfSignedCertificateExtension(); @@ -40,25 +46,15 @@ class ConfigurationFactoryTest { @RegisterExtension LogCapturer logCapturer = - LogCapturer.create().captureForLogger(ConfigurationFactory.class.getName(), Level.TRACE); - - @Test - void parseAndInterpret_BadInputStream() { - assertThatThrownBy( - () -> - ConfigurationFactory.parseAndInterpret( - new ByteArrayInputStream("foo".getBytes(StandardCharsets.UTF_8)))) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unable to parse inputStream"); - } + LogCapturer.create().captureForLogger(DeclarativeConfiguration.class.getName(), Level.TRACE); /** * Verify each example in open-telemetry/opentelemetry-configuration/examples - * can pass {@link ConfigurationFactory#parseAndInterpret(InputStream)}. + * can pass {@link DeclarativeConfiguration#parseAndCreate(InputStream)}. 
*/ @Test - void parseAndInterpret_Examples(@TempDir Path tempDir) + void parseAndCreate_Examples(@TempDir Path tempDir) throws IOException, CertificateEncodingException { // Write certificates to temp files String certificatePath = @@ -71,7 +67,7 @@ void parseAndInterpret_Examples(@TempDir Path tempDir) tempDir, "clientCertificate.cert", clientTls.certificate().getEncoded()); File examplesDir = new File(System.getenv("CONFIG_EXAMPLE_DIR")); - assertThat(examplesDir.isDirectory()).isTrue(); + assertThat(examplesDir).isDirectory(); for (File example : Objects.requireNonNull(examplesDir.listFiles())) { // Skip anchors.yaml because support for merge (i.e. "<<: *anchor") was explicitly removed in @@ -91,29 +87,33 @@ void parseAndInterpret_Examples(@TempDir Path tempDir) String rewrittenExampleContent = exampleContent .replaceAll( - "certificate: .*\n", "certificate: " + certificatePath + System.lineSeparator()) + "certificate: .*\n", + "certificate: " + certificatePath.replace("\\", "\\\\") + System.lineSeparator()) .replaceAll( - "client_key: .*\n", "client_key: " + clientKeyPath + System.lineSeparator()) + "client_key: .*\n", + "client_key: " + clientKeyPath.replace("\\", "\\\\") + System.lineSeparator()) .replaceAll( "client_certificate: .*\n", - "client_certificate: " + clientCertificatePath + System.lineSeparator()); + "client_certificate: " + + clientCertificatePath.replace("\\", "\\\\") + + System.lineSeparator()); InputStream is = new ByteArrayInputStream(rewrittenExampleContent.getBytes(StandardCharsets.UTF_8)); // Verify that file can be parsed and interpreted without error - assertThatCode(() -> cleanup.addCloseable(ConfigurationFactory.parseAndInterpret(is))) + assertThatCode(() -> cleanup.addCloseable(DeclarativeConfiguration.parseAndCreate(is))) .as("Example file: " + example.getName()) .doesNotThrowAnyException(); } } @Test - void parseAndInterpret_Exception_CleansUpPartials() { + void parseAndCreate_Exception_CleansUpPartials() { // Trigger an exception after some components have been configured by adding a valid batch // exporter with OTLP exporter, following by invalid batch exporter which references invalid // exporter "foo". String yaml = - "file_format: \"0.1\"\n" + "file_format: \"0.3\"\n" + "logger_provider:\n" + " processors:\n" + " - batch:\n" @@ -125,14 +125,61 @@ void parseAndInterpret_Exception_CleansUpPartials() { assertThatThrownBy( () -> - ConfigurationFactory.parseAndInterpret( + DeclarativeConfiguration.parseAndCreate( new ByteArrayInputStream(yaml.getBytes(StandardCharsets.UTF_8)))) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unrecognized log record exporter(s): [foo]"); + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage( + "No component provider detected for io.opentelemetry.sdk.logs.export.LogRecordExporter with name \"foo\"."); logCapturer.assertContains( - "Error encountered interpreting configuration. Closing partially configured components."); + "Error encountered interpreting model. 
Closing partially configured components."); logCapturer.assertContains( - "Closing io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter"); + "Closing io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter"); logCapturer.assertContains("Closing io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor"); } + + @Test + void parseAndCreate_EmptyComponentProviderConfig() { + String yaml = + "file_format: \"0.3\"\n" + + "logger_provider:\n" + + " processors:\n" + + " - test:\n" + + "tracer_provider:\n" + + " processors:\n" + + " - test:\n"; + + assertThatCode( + () -> + DeclarativeConfiguration.parseAndCreate( + new ByteArrayInputStream(yaml.getBytes(StandardCharsets.UTF_8)))) + .doesNotThrowAnyException(); + } + + @Test + void create_ModelCustomizer() { + OpenTelemetryConfigurationModel model = new OpenTelemetryConfigurationModel(); + model.withFileFormat("0.3"); + model.withTracerProvider( + new TracerProviderModel() + .withProcessors( + Collections.singletonList( + new SpanProcessorModel().withAdditionalProperty("test", null)))); + OpenTelemetrySdk sdk = + DeclarativeConfiguration.create( + model, + // customizer is TestDeclarativeConfigurationCustomizerProvider + SpiHelper.serviceComponentLoader( + DeclarativeConfigurationCreateTest.class.getClassLoader())); + assertThat(sdk.toString()) + .contains( + "resource=Resource{schemaUrl=null, attributes={" + + "color=\"blue\", " + + "foo=\"bar\", " + + "order=\"second\", " + + "service.name=\"unknown_service:java\", " + + "shape=\"square\", " + + "telemetry.sdk.language=\"java\", " + + "telemetry.sdk.name=\"opentelemetry\", " + + "telemetry.sdk.version=\""); + } } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationParseTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationParseTest.java new file mode 100644 index 00000000000..f8ad7330c07 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/DeclarativeConfigurationParseTest.java @@ -0,0 +1,719 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AggregationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOffModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOnModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeNameValueModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ClientModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ConsoleModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.DetectorAttributesModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.DetectorsModel; 
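A minimal usage sketch of the parseAndCreate entry point exercised by the tests above (illustrative only, not part of this change; the config path is a placeholder, and this assumes parseAndCreate returns an OpenTelemetrySdk when given a valid file_format 0.3 document):

    // Sketch only; assumes imports of java.io.InputStream, java.nio.file.Files,
    // java.nio.file.Paths, io.opentelemetry.sdk.OpenTelemetrySdk, and
    // io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfiguration.
    try (InputStream is = Files.newInputStream(Paths.get("/path/to/otel-config.yaml"))) {
      OpenTelemetrySdk sdk = DeclarativeConfiguration.parseAndCreate(is);
      // the returned SDK owns the configured providers; close it on shutdown
      sdk.close();
    }
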
+import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExplicitBucketHistogramModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.GeneralInstrumentationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.HttpModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.IncludeExcludeModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.InstrumentationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LanguageSpecificInstrumentationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProviderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MeterProviderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricProducerModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.NameStringValuePairModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpencensusModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetricModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ParentBasedModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeerModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PrometheusModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PropagatorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PushMetricExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ResourceModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SamplerModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SelectorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ServerModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ServiceMappingModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleLogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleSpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.StreamModel; 
+import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TraceIdRatioBasedModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProviderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ViewModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ZipkinModel; +import java.io.ByteArrayInputStream; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.AbstractMap; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import javax.annotation.Nullable; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class DeclarativeConfigurationParseTest { + + @Test + void parse_BadInputStream() { + assertThatThrownBy( + () -> + DeclarativeConfiguration.parseAndCreate( + new ByteArrayInputStream("foo".getBytes(StandardCharsets.UTF_8)))) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage("Unable to parse configuration input stream"); + } + + @Test + void parse_KitchenSinkExampleFile() throws IOException { + OpenTelemetryConfigurationModel expected = new OpenTelemetryConfigurationModel(); + + expected.withFileFormat("0.3"); + expected.withDisabled(false); + + // General config + ResourceModel resource = + new ResourceModel() + .withAttributes( + Arrays.asList( + new AttributeNameValueModel() + .withName("service.name") + .withValue("unknown_service"), + new AttributeNameValueModel() + .withName("string_key") + .withValue("value") + .withType(AttributeNameValueModel.Type.STRING), + new AttributeNameValueModel() + .withName("bool_key") + .withValue(true) + .withType(AttributeNameValueModel.Type.BOOL), + new AttributeNameValueModel() + .withName("int_key") + .withValue(1) + .withType(AttributeNameValueModel.Type.INT), + new AttributeNameValueModel() + .withName("double_key") + .withValue(1.1) + .withType(AttributeNameValueModel.Type.DOUBLE), + new AttributeNameValueModel() + .withName("string_array_key") + .withValue(Arrays.asList("value1", "value2")) + .withType(AttributeNameValueModel.Type.STRING_ARRAY), + new AttributeNameValueModel() + .withName("bool_array_key") + .withValue(Arrays.asList(true, false)) + .withType(AttributeNameValueModel.Type.BOOL_ARRAY), + new AttributeNameValueModel() + .withName("int_array_key") + .withValue(Arrays.asList(1, 2)) + .withType(AttributeNameValueModel.Type.INT_ARRAY), + new AttributeNameValueModel() + .withName("double_array_key") + .withValue(Arrays.asList(1.1, 2.2)) + .withType(AttributeNameValueModel.Type.DOUBLE_ARRAY))) + .withAttributesList("service.namespace=my-namespace,service.version=1.0.0") + .withDetectors( + new DetectorsModel() + .withAttributes( + new DetectorAttributesModel() + .withIncluded(Collections.singletonList("process.*")) + .withExcluded(Collections.singletonList("process.command_args")))) + .withSchemaUrl("https://opentelemetry.io/schemas/1.16.0"); + expected.withResource(resource); + + AttributeLimitsModel attributeLimits = + new AttributeLimitsModel().withAttributeValueLengthLimit(4096).withAttributeCountLimit(128); + expected.withAttributeLimits(attributeLimits); + + PropagatorModel propagator = + new PropagatorModel() + .withComposite( + Arrays.asList( + "tracecontext", "baggage", "b3", "b3multi", "jaeger", "xray", "ottrace")); + 
expected.withPropagator(propagator); + + // TracerProvider config + TracerProviderModel tracerProvider = new TracerProviderModel(); + + SpanLimitsModel spanLimits = + new SpanLimitsModel() + .withAttributeValueLengthLimit(4096) + .withAttributeCountLimit(128) + .withEventCountLimit(128) + .withLinkCountLimit(128) + .withEventAttributeCountLimit(128) + .withLinkAttributeCountLimit(128); + tracerProvider.withLimits(spanLimits); + + SamplerModel sampler = + new SamplerModel() + .withParentBased( + new ParentBasedModel() + .withRoot( + new SamplerModel() + .withTraceIdRatioBased(new TraceIdRatioBasedModel().withRatio(0.0001))) + .withRemoteParentSampled(new SamplerModel().withAlwaysOn(new AlwaysOnModel())) + .withRemoteParentNotSampled( + new SamplerModel().withAlwaysOff(new AlwaysOffModel())) + .withLocalParentSampled(new SamplerModel().withAlwaysOn(new AlwaysOnModel())) + .withLocalParentNotSampled( + new SamplerModel().withAlwaysOff(new AlwaysOffModel()))); + tracerProvider.withSampler(sampler); + + SpanProcessorModel spanProcessor1 = + new SpanProcessorModel() + .withBatch( + new BatchSpanProcessorModel() + .withScheduleDelay(5_000) + .withExportTimeout(30_000) + .withMaxQueueSize(2048) + .withMaxExportBatchSize(512) + .withExporter( + new SpanExporterModel() + .withOtlp( + new OtlpModel() + .withProtocol("http/protobuf") + .withEndpoint("http://localhost:4318/v1/traces") + .withCertificate("/app/cert.pem") + .withClientKey("/app/cert.pem") + .withClientCertificate("/app/cert.pem") + .withHeaders( + Collections.singletonList( + new NameStringValuePairModel() + .withName("api-key") + .withValue("1234"))) + .withHeadersList("api-key=1234") + .withCompression("gzip") + .withTimeout(10_000) + .withInsecure(false)))); + SpanProcessorModel spanProcessor2 = + new SpanProcessorModel() + .withBatch( + new BatchSpanProcessorModel() + .withExporter( + new SpanExporterModel() + .withZipkin( + new ZipkinModel() + .withEndpoint("http://localhost:9411/api/v2/spans") + .withTimeout(10_000)))); + SpanProcessorModel spanProcessor3 = + new SpanProcessorModel() + .withSimple( + new SimpleSpanProcessorModel() + .withExporter(new SpanExporterModel().withConsole(new ConsoleModel()))); + tracerProvider.withProcessors(Arrays.asList(spanProcessor1, spanProcessor2, spanProcessor3)); + + expected.withTracerProvider(tracerProvider); + // end TracerProvider config + + // LoggerProvider config + LoggerProviderModel loggerProvider = new LoggerProviderModel(); + + LogRecordLimitsModel logRecordLimits = + new LogRecordLimitsModel().withAttributeValueLengthLimit(4096).withAttributeCountLimit(128); + loggerProvider.withLimits(logRecordLimits); + + LogRecordProcessorModel logRecordProcessor1 = + new LogRecordProcessorModel() + .withBatch( + new BatchLogRecordProcessorModel() + .withScheduleDelay(5_000) + .withExportTimeout(30_000) + .withMaxQueueSize(2048) + .withMaxExportBatchSize(512) + .withExporter( + new LogRecordExporterModel() + .withOtlp( + new OtlpModel() + .withProtocol("http/protobuf") + .withEndpoint("http://localhost:4318/v1/logs") + .withCertificate("/app/cert.pem") + .withClientKey("/app/cert.pem") + .withClientCertificate("/app/cert.pem") + .withHeaders( + Collections.singletonList( + new NameStringValuePairModel() + .withName("api-key") + .withValue("1234"))) + .withHeadersList("api-key=1234") + .withCompression("gzip") + .withTimeout(10_000) + .withInsecure(false)))); + LogRecordProcessorModel logRecordProcessor2 = + new LogRecordProcessorModel() + .withSimple( + new SimpleLogRecordProcessorModel() + 
.withExporter(new LogRecordExporterModel().withConsole(new ConsoleModel()))); + loggerProvider.withProcessors(Arrays.asList(logRecordProcessor1, logRecordProcessor2)); + + expected.withLoggerProvider(loggerProvider); + // end LoggerProvider config + + // MeterProvider config + MeterProviderModel meterProvider = new MeterProviderModel(); + + MetricReaderModel metricReader1 = + new MetricReaderModel() + .withPull( + new PullMetricReaderModel() + .withExporter( + new PullMetricExporterModel() + .withPrometheus( + new PrometheusModel() + .withHost("localhost") + .withPort(9464) + .withWithoutUnits(false) + .withWithoutTypeSuffix(false) + .withWithoutScopeInfo(false) + .withWithResourceConstantLabels( + new IncludeExcludeModel() + .withIncluded(Collections.singletonList("service*")) + .withExcluded( + Collections.singletonList("service.attr1")))))) + .withProducers( + Collections.singletonList( + new MetricProducerModel().withOpencensus(new OpencensusModel()))); + MetricReaderModel metricReader2 = + new MetricReaderModel() + .withPeriodic( + new PeriodicMetricReaderModel() + .withInterval(5_000) + .withTimeout(30_000) + .withExporter( + new PushMetricExporterModel() + .withOtlp( + new OtlpMetricModel() + .withProtocol("http/protobuf") + .withEndpoint("http://localhost:4318/v1/metrics") + .withCertificate("/app/cert.pem") + .withClientKey("/app/cert.pem") + .withClientCertificate("/app/cert.pem") + .withHeaders( + Collections.singletonList( + new NameStringValuePairModel() + .withName("api-key") + .withValue("1234"))) + .withHeadersList("api-key=1234") + .withCompression("gzip") + .withTimeout(10_000) + .withInsecure(false) + .withTemporalityPreference("delta") + .withDefaultHistogramAggregation( + OtlpMetricModel.DefaultHistogramAggregation + .BASE_2_EXPONENTIAL_BUCKET_HISTOGRAM)))) + .withProducers( + Collections.singletonList( + new MetricProducerModel() + .withAdditionalProperty("prometheus", Collections.emptyMap()))); + MetricReaderModel metricReader3 = + new MetricReaderModel() + .withPeriodic( + new PeriodicMetricReaderModel() + .withExporter(new PushMetricExporterModel().withConsole(new ConsoleModel()))); + meterProvider.withReaders(Arrays.asList(metricReader1, metricReader2, metricReader3)); + + ViewModel view = + new ViewModel() + .withSelector( + new SelectorModel() + .withInstrumentName("my-instrument") + .withInstrumentType(SelectorModel.InstrumentType.HISTOGRAM) + .withUnit("ms") + .withMeterName("my-meter") + .withMeterVersion("1.0.0") + .withMeterSchemaUrl("https://opentelemetry.io/schemas/1.16.0")) + .withStream( + new StreamModel() + .withName("new_instrument_name") + .withDescription("new_description") + .withAggregation( + new AggregationModel() + .withExplicitBucketHistogram( + new ExplicitBucketHistogramModel() + .withBoundaries( + Arrays.asList( + 0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0, + 750.0, 1000.0, 2500.0, 5000.0, 7500.0, 10000.0)) + .withRecordMinMax(true))) + .withAttributeKeys( + new IncludeExcludeModel() + .withIncluded(Arrays.asList("key1", "key2")) + .withExcluded(Collections.singletonList("key3")))); + meterProvider.withViews(Collections.singletonList(view)); + + expected.withMeterProvider(meterProvider); + // end MeterProvider config + + // start instrumentation config + InstrumentationModel instrumentation = + new InstrumentationModel() + .withGeneral( + new GeneralInstrumentationModel() + .withPeer( + new PeerModel() + .withServiceMapping( + Arrays.asList( + new ServiceMappingModel() + .withPeer("1.2.3.4") + .withService("FooService"), + new 
ServiceMappingModel() + .withPeer("2.3.4.5") + .withService("BarService")))) + .withHttp( + new HttpModel() + .withClient( + new ClientModel() + .withRequestCapturedHeaders( + Arrays.asList("Content-Type", "Accept")) + .withResponseCapturedHeaders( + Arrays.asList("Content-Type", "Content-Encoding"))) + .withServer( + new ServerModel() + .withRequestCapturedHeaders( + Arrays.asList("Content-Type", "Accept")) + .withResponseCapturedHeaders( + Arrays.asList("Content-Type", "Content-Encoding"))))) + .withCpp( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withDotnet( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withErlang( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withGo( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withJava( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withJs( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withPhp( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withPython( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withRuby( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withRust( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))) + .withSwift( + new LanguageSpecificInstrumentationModel() + .withAdditionalProperty( + "example", Collections.singletonMap("property", "value"))); + expected.withInstrumentation(instrumentation); + // end instrumentation config + + try (FileInputStream configExampleFile = + new FileInputStream(System.getenv("CONFIG_EXAMPLE_DIR") + "/kitchen-sink.yaml")) { + OpenTelemetryConfigurationModel config = DeclarativeConfiguration.parse(configExampleFile); + + // General config + assertThat(config.getFileFormat()).isEqualTo("0.3"); + assertThat(config.getResource()).isEqualTo(resource); + assertThat(config.getAttributeLimits()).isEqualTo(attributeLimits); + assertThat(config.getPropagator()).isEqualTo(propagator); + + // TracerProvider config + TracerProviderModel configTracerProvider = config.getTracerProvider(); + assertThat(configTracerProvider.getLimits()).isEqualTo(spanLimits); + assertThat(configTracerProvider.getSampler()).isEqualTo(sampler); + assertThat(configTracerProvider.getProcessors()) + .isEqualTo(Arrays.asList(spanProcessor1, spanProcessor2, spanProcessor3)); + + // LoggerProvider config + LoggerProviderModel configLoggerProvider = config.getLoggerProvider(); + assertThat(configLoggerProvider.getLimits()).isEqualTo(logRecordLimits); + assertThat(configLoggerProvider.getProcessors()) + .isEqualTo(Arrays.asList(logRecordProcessor1, logRecordProcessor2)); + + // MeterProvider config + MeterProviderModel configMeterProvider = config.getMeterProvider(); + assertThat(configMeterProvider.getReaders()) + .isEqualTo(Arrays.asList(metricReader1, metricReader2, 
metricReader3)); + assertThat(configMeterProvider.getViews()).isEqualTo(Collections.singletonList(view)); + + // Instrumentation config + InstrumentationModel configInstrumentation = config.getInstrumentation(); + assertThat(configInstrumentation).isEqualTo(instrumentation); + + // All configuration + assertThat(config).isEqualTo(expected); + } + } + + @Test + void parse_nullValuesParsedToEmptyObjects() { + String objectPlaceholderString = + "file_format: \"0.3\"\n" + + "tracer_provider:\n" + + " processors:\n" + + " - batch:\n" + + " exporter:\n" + + " console: {}\n" + + "meter_provider:\n" + + " views:\n" + + " - selector:\n" + + " instrument_type: histogram\n" + + " stream:\n" + + " aggregation:\n" + + " drop: {}\n"; + OpenTelemetryConfigurationModel objectPlaceholderModel = + DeclarativeConfiguration.parse( + new ByteArrayInputStream(objectPlaceholderString.getBytes(StandardCharsets.UTF_8))); + + String noOjbectPlaceholderString = + "file_format: \"0.3\"\n" + + "tracer_provider:\n" + + " processors:\n" + + " - batch:\n" + + " exporter:\n" + + " console:\n" + + "meter_provider:\n" + + " views:\n" + + " - selector:\n" + + " instrument_type: histogram\n" + + " stream:\n" + + " aggregation:\n" + + " drop:\n"; + OpenTelemetryConfigurationModel noObjectPlaceholderModel = + DeclarativeConfiguration.parse( + new ByteArrayInputStream(noOjbectPlaceholderString.getBytes(StandardCharsets.UTF_8))); + + SpanExporterModel exporter = + noObjectPlaceholderModel + .getTracerProvider() + .getProcessors() + .get(0) + .getBatch() + .getExporter(); + assertThat(exporter.getConsole()).isNotNull(); + assertThat(exporter.getOtlp()).isNull(); + + AggregationModel aggregation = + noObjectPlaceholderModel.getMeterProvider().getViews().get(0).getStream().getAggregation(); + assertThat(aggregation.getDrop()).isNotNull(); + assertThat(aggregation.getSum()).isNull(); + + assertThat(objectPlaceholderModel).isEqualTo(noObjectPlaceholderModel); + } + + @Test + void parse_nullBoxedPrimitivesParsedToNull() { + String yaml = + "file_format:\n" // String + + "disabled:\n" // Boolean + + "attribute_limits:\n" + + " attribute_value_length_limit:\n" // Integer + + "tracer_provider:\n" + + " sampler:\n" + + " trace_id_ratio_based:\n" + + " ratio:\n"; // Double + + OpenTelemetryConfigurationModel model = + DeclarativeConfiguration.parse( + new ByteArrayInputStream(yaml.getBytes(StandardCharsets.UTF_8))); + + assertThat(model.getFileFormat()).isNull(); + assertThat(model.getDisabled()).isNull(); + assertThat(model.getAttributeLimits().getAttributeValueLengthLimit()).isNull(); + assertThat(model.getTracerProvider().getSampler().getTraceIdRatioBased().getRatio()).isNull(); + + assertThat(model) + .isEqualTo( + new OpenTelemetryConfigurationModel() + .withAttributeLimits(new AttributeLimitsModel()) + .withTracerProvider( + new TracerProviderModel() + .withSampler( + new SamplerModel() + .withTraceIdRatioBased(new TraceIdRatioBasedModel())))); + } + + @ParameterizedTest + @MethodSource("coreSchemaValuesArgs") + void coreSchemaValues(String rawYaml, Object expectedYamlResult) { + Object yaml = + DeclarativeConfiguration.loadYaml( + new ByteArrayInputStream(rawYaml.getBytes(StandardCharsets.UTF_8)), + Collections.emptyMap()); + assertThat(yaml).isEqualTo(expectedYamlResult); + } + + @SuppressWarnings("unchecked") + private static java.util.stream.Stream coreSchemaValuesArgs() { + return java.util.stream.Stream.of( + Arguments.of("key1: 0o123\n", mapOf(entry("key1", 83))), + Arguments.of("key1: 0123\n", mapOf(entry("key1", 123))), + 
Arguments.of("key1: 0xdeadbeef\n", mapOf(entry("key1", 3735928559L))), + Arguments.of("key1: \"0xdeadbeef\"\n", mapOf(entry("key1", "0xdeadbeef")))); + } + + @ParameterizedTest + @MethodSource("envVarSubstitutionArgs") + void envSubstituteAndLoadYaml(String rawYaml, Object expectedYamlResult) { + Map environmentVariables = new HashMap<>(); + environmentVariables.put("STR_1", "value1"); + environmentVariables.put("STR_2", "value2"); + environmentVariables.put("EMPTY_STR", ""); + environmentVariables.put("BOOL", "true"); + environmentVariables.put("INT", "1"); + environmentVariables.put("FLOAT", "1.1"); + environmentVariables.put("HEX", "0xdeadbeef"); + + Object yaml = + DeclarativeConfiguration.loadYaml( + new ByteArrayInputStream(rawYaml.getBytes(StandardCharsets.UTF_8)), + environmentVariables); + assertThat(yaml).isEqualTo(expectedYamlResult); + } + + @SuppressWarnings("unchecked") + private static java.util.stream.Stream envVarSubstitutionArgs() { + return java.util.stream.Stream.of( + // Simple cases + Arguments.of("key1: ${STR_1}\n", mapOf(entry("key1", "value1"))), + Arguments.of("key1: ${BOOL}\n", mapOf(entry("key1", true))), + Arguments.of("key1: ${INT}\n", mapOf(entry("key1", 1))), + Arguments.of("key1: ${FLOAT}\n", mapOf(entry("key1", 1.1))), + Arguments.of("key1: ${HEX}\n", mapOf(entry("key1", 3735928559L))), + Arguments.of( + "key1: ${STR_1}\n" + "key2: value2\n", + mapOf(entry("key1", "value1"), entry("key2", "value2"))), + Arguments.of( + "key1: ${STR_1} value1\n" + "key2: value2\n", + mapOf(entry("key1", "value1 value1"), entry("key2", "value2"))), + // Default cases + Arguments.of("key1: ${NOT_SET:-value1}\n", mapOf(entry("key1", "value1"))), + Arguments.of("key1: ${NOT_SET:-true}\n", mapOf(entry("key1", true))), + Arguments.of("key1: ${NOT_SET:-1}\n", mapOf(entry("key1", 1))), + Arguments.of("key1: ${NOT_SET:-1.1}\n", mapOf(entry("key1", 1.1))), + Arguments.of("key1: ${NOT_SET:-0xdeadbeef}\n", mapOf(entry("key1", 3735928559L))), + Arguments.of( + "key1: ${NOT_SET:-value1} value2\n" + "key2: value2\n", + mapOf(entry("key1", "value1 value2"), entry("key2", "value2"))), + // Multiple environment variables referenced + Arguments.of("key1: ${STR_1}${STR_2}\n", mapOf(entry("key1", "value1value2"))), + Arguments.of("key1: ${STR_1} ${STR_2}\n", mapOf(entry("key1", "value1 value2"))), + Arguments.of( + "key1: ${STR_1} ${NOT_SET:-default} ${STR_2}\n", + mapOf(entry("key1", "value1 default value2"))), + // Undefined / empty environment variable + Arguments.of("key1: ${EMPTY_STR}\n", mapOf(entry("key1", null))), + Arguments.of("key1: ${STR_3}\n", mapOf(entry("key1", null))), + Arguments.of("key1: ${STR_1} ${STR_3}\n", mapOf(entry("key1", "value1"))), + // Environment variable keys must match pattern: [a-zA-Z_]+[a-zA-Z0-9_]* + Arguments.of("key1: ${VAR&}\n", mapOf(entry("key1", "${VAR&}"))), + // Environment variable substitution only takes place in scalar values of maps + Arguments.of("${STR_1}: value1\n", mapOf(entry("${STR_1}", "value1"))), + Arguments.of( + "key1:\n ${STR_1}: value1\n", + mapOf(entry("key1", mapOf(entry("${STR_1}", "value1"))))), + Arguments.of( + "key1:\n - ${STR_1}\n", mapOf(entry("key1", Collections.singletonList("${STR_1}")))), + // Quoted environment variables + Arguments.of("key1: \"${HEX}\"\n", mapOf(entry("key1", "0xdeadbeef"))), + Arguments.of("key1: \"${STR_1}\"\n", mapOf(entry("key1", "value1"))), + Arguments.of("key1: \"${EMPTY_STR}\"\n", mapOf(entry("key1", ""))), + Arguments.of("key1: \"${BOOL}\"\n", mapOf(entry("key1", "true"))), + 
Arguments.of("key1: \"${INT}\"\n", mapOf(entry("key1", "1"))), + Arguments.of("key1: \"${FLOAT}\"\n", mapOf(entry("key1", "1.1")))); + } + + private static Map.Entry entry(K key, @Nullable V value) { + return new AbstractMap.SimpleEntry<>(key, value); + } + + @SuppressWarnings("unchecked") + private static Map mapOf(Map.Entry... entries) { + Map result = new HashMap<>(); + for (Map.Entry entry : entries) { + result.put(entry.getKey(), entry.getValue()); + } + return result; + } + + @Test + void read_WithEnvironmentVariables() { + String yaml = + "file_format: \"0.3\"\n" + + "tracer_provider:\n" + + " processors:\n" + + " - batch:\n" + + " exporter:\n" + + " otlp:\n" + + " endpoint: ${OTEL_EXPORTER_OTLP_ENDPOINT}\n" + + " - batch:\n" + + " exporter:\n" + + " otlp:\n" + + " endpoint: ${UNSET_ENV_VAR}\n"; + Map envVars = new HashMap<>(); + envVars.put("OTEL_EXPORTER_OTLP_ENDPOINT", "http://collector:4317"); + OpenTelemetryConfigurationModel model = + DeclarativeConfiguration.parse( + new ByteArrayInputStream(yaml.getBytes(StandardCharsets.UTF_8)), envVars); + assertThat(model) + .isEqualTo( + new OpenTelemetryConfigurationModel() + .withFileFormat("0.3") + .withTracerProvider( + new TracerProviderModel() + .withProcessors( + Arrays.asList( + new SpanProcessorModel() + .withBatch( + new BatchSpanProcessorModel() + .withExporter( + new SpanExporterModel() + .withOtlp( + new OtlpModel() + .withEndpoint( + "http://collector:4317")))), + new SpanProcessorModel() + .withBatch( + new BatchSpanProcessorModel() + .withExporter( + new SpanExporterModel() + .withOtlp(new OtlpModel()))))))); + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/InstrumentSelectorFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/InstrumentSelectorFactoryTest.java index c10b6394bc4..3f3192e5853 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/InstrumentSelectorFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/InstrumentSelectorFactoryTest.java @@ -9,9 +9,9 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Selector; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SelectorModel; import io.opentelemetry.sdk.metrics.InstrumentSelector; import io.opentelemetry.sdk.metrics.InstrumentType; import java.util.Collections; @@ -19,23 +19,13 @@ class InstrumentSelectorFactoryTest { - @Test - void create_Null() { - assertThatThrownBy( - () -> - InstrumentSelectorFactory.getInstance() - .create(null, mock(SpiHelper.class), Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("selector must not be null"); - } - @Test void create_Defaults() { assertThatThrownBy( () -> InstrumentSelectorFactory.getInstance() - .create(new Selector(), mock(SpiHelper.class), Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) + .create(new SelectorModel(), mock(SpiHelper.class), Collections.emptyList())) + .isInstanceOf(DeclarativeConfigException.class) .hasMessage("Invalid selector"); } @@ -44,9 +34,9 @@ void 
create() { assertThat( InstrumentSelectorFactory.getInstance() .create( - new Selector() + new SelectorModel() .withInstrumentName("instrument-name") - .withInstrumentType(Selector.InstrumentType.COUNTER) + .withInstrumentType(SelectorModel.InstrumentType.COUNTER) .withMeterName("meter-name") .withMeterSchemaUrl("https://opentelemetry.io/schemas/1.16.0") .withMeterVersion("1.0.0"), diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogLimitsFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogLimitsFactoryTest.java index 49a0b337af2..53e811e6629 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogLimitsFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogLimitsFactoryTest.java @@ -9,8 +9,8 @@ import static org.mockito.Mockito.mock; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimits; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimitsModel; import io.opentelemetry.sdk.logs.LogLimits; import java.util.Collections; import java.util.stream.Stream; @@ -31,21 +31,27 @@ void create(LogRecordLimitsAndAttributeLimits model, LogLimits expectedLogLimits private static Stream createArguments() { return Stream.of( - Arguments.of(null, LogLimits.builder().build()), Arguments.of( LogRecordLimitsAndAttributeLimits.create(null, null), LogLimits.builder().build()), Arguments.of( - LogRecordLimitsAndAttributeLimits.create(new AttributeLimits(), new LogRecordLimits()), + LogRecordLimitsAndAttributeLimits.create( + new AttributeLimitsModel(), new LogRecordLimitsModel()), LogLimits.builder().build()), Arguments.of( LogRecordLimitsAndAttributeLimits.create( - new AttributeLimits().withAttributeValueLengthLimit(1).withAttributeCountLimit(2), - new LogRecordLimits()), + new AttributeLimitsModel() + .withAttributeValueLengthLimit(1) + .withAttributeCountLimit(2), + new LogRecordLimitsModel()), LogLimits.builder().setMaxAttributeValueLength(1).setMaxNumberOfAttributes(2).build()), Arguments.of( LogRecordLimitsAndAttributeLimits.create( - new AttributeLimits().withAttributeValueLengthLimit(1).withAttributeCountLimit(2), - new LogRecordLimits().withAttributeValueLengthLimit(3).withAttributeCountLimit(4)), + new AttributeLimitsModel() + .withAttributeValueLengthLimit(1) + .withAttributeCountLimit(2), + new LogRecordLimitsModel() + .withAttributeValueLengthLimit(3) + .withAttributeCountLimit(4)), LogLimits.builder().setMaxAttributeValueLength(3).setMaxNumberOfAttributes(4).build())); } } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordExporterFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordExporterFactoryTest.java index 721421c8552..39b95d7e75d 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordExporterFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordExporterFactoryTest.java @@ -8,22 +8,22 @@ import 
static io.opentelemetry.sdk.extension.incubator.fileconfig.FileConfigTestUtil.createTempFileWithContent; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableMap; import com.linecorp.armeria.testing.junit5.server.SelfSignedCertificateExtension; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter; -import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Headers; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.component.LogRecordExporterComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.NameStringValuePairModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; import io.opentelemetry.sdk.logs.export.LogRecordExporter; import java.io.Closeable; import java.io.IOException; @@ -31,12 +31,17 @@ import java.security.cert.CertificateEncodingException; import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.stream.Collectors; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.api.io.TempDir; import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; @@ -53,32 +58,46 @@ class LogRecordExporterFactoryTest { @RegisterExtension CleanupExtension cleanup = new CleanupExtension(); - private SpiHelper spiHelper = - SpiHelper.create(LogRecordExporterFactoryTest.class.getClassLoader()); - - @Test - void create_Null() { - LogRecordExporter expectedExporter = LogRecordExporter.composite(); - - LogRecordExporter exporter = - LogRecordExporterFactory.getInstance().create(null, spiHelper, new ArrayList<>()); + private final SpiHelper spiHelper = + spy(SpiHelper.create(SpanExporterFactoryTest.class.getClassLoader())); + private List<ComponentProvider<?>> loadedComponentProviders = Collections.emptyList(); + + @BeforeEach + @SuppressWarnings("unchecked") + void setup() { + when(spiHelper.load(ComponentProvider.class)) + .thenAnswer( + invocation -> { + List<ComponentProvider<?>> result = + (List<ComponentProvider<?>>) invocation.callRealMethod(); + loadedComponentProviders = 
result.stream().map(Mockito::spy).collect(Collectors.toList()); + return loadedComponentProviders; + }); + } - assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); + private ComponentProvider getComponentProvider(String name, Class type) { + return loadedComponentProviders.stream() + .filter( + componentProvider -> + componentProvider.getName().equals(name) + && componentProvider.getType().equals(type)) + .findFirst() + .orElseThrow(IllegalStateException::new); } @Test void create_OtlpDefaults() { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); - OtlpGrpcLogRecordExporter expectedExporter = OtlpGrpcLogRecordExporter.getDefault(); + OtlpHttpLogRecordExporter expectedExporter = OtlpHttpLogRecordExporter.getDefault(); cleanup.addCloseable(expectedExporter); LogRecordExporter exporter = LogRecordExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .LogRecordExporter() - .withOtlp(new Otlp()), + .LogRecordExporterModel() + .withOtlp(new OtlpModel()), spiHelper, closeables); cleanup.addCloseable(exporter); @@ -86,25 +105,26 @@ void create_OtlpDefaults() { assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableLogRecordExporterProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.otlp.logs.protocol")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.endpoint")).isNull(); - assertThat(configProperties.getMap("otel.exporter.otlp.logs.headers")).isEmpty(); - assertThat(configProperties.getString("otel.exporter.otlp.logs.compression")).isNull(); - assertThat(configProperties.getDuration("otel.exporter.otlp.logs.timeout")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.logs.certificate")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.logs.client.key")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.logs.client.certificate")).isNull(); + assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); + + ArgumentCaptor configCaptor = + ArgumentCaptor.forClass(DeclarativeConfigProperties.class); + ComponentProvider componentProvider = getComponentProvider("otlp", LogRecordExporter.class); + verify(componentProvider).create(configCaptor.capture()); + DeclarativeConfigProperties configProperties = configCaptor.getValue(); + assertThat(configProperties.getString("protocol")).isNull(); + assertThat(configProperties.getString("endpoint")).isNull(); + assertThat(configProperties.getStructured("headers")).isNull(); + assertThat(configProperties.getString("compression")).isNull(); + assertThat(configProperties.getInt("timeout")).isNull(); + assertThat(configProperties.getString("certificate")).isNull(); + assertThat(configProperties.getString("client_key")).isNull(); + assertThat(configProperties.getString("client_certificate")).isNull(); } @Test void create_OtlpConfigured(@TempDir Path tempDir) throws CertificateEncodingException, IOException { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); OtlpHttpLogRecordExporter expectedExporter = OtlpHttpLogRecordExporter.builder() @@ -130,15 +150,19 @@ void create_OtlpConfigured(@TempDir Path tempDir) LogRecordExporterFactory.getInstance() .create( new 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .LogRecordExporter() + .LogRecordExporterModel() .withOtlp( - new Otlp() + new OtlpModel() .withProtocol("http/protobuf") - .withEndpoint("http://example:4318") + .withEndpoint("http://example:4318/v1/logs") .withHeaders( - new Headers() - .withAdditionalProperty("key1", "value1") - .withAdditionalProperty("key2", "value2")) + Arrays.asList( + new NameStringValuePairModel() + .withName("key1") + .withValue("value1"), + new NameStringValuePairModel() + .withName("key2") + .withValue("value2"))) .withCompression("gzip") .withTimeout(15_000) .withCertificate(certificatePath) @@ -151,30 +175,34 @@ void create_OtlpConfigured(@TempDir Path tempDir) assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableLogRecordExporterProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.otlp.logs.protocol")) - .isEqualTo("http/protobuf"); - assertThat(configProperties.getString("otel.exporter.otlp.endpoint")) - .isEqualTo("http://example:4318"); - assertThat(configProperties.getMap("otel.exporter.otlp.logs.headers")) - .isEqualTo(ImmutableMap.of("key1", "value1", "key2", "value2")); - assertThat(configProperties.getString("otel.exporter.otlp.logs.compression")).isEqualTo("gzip"); - assertThat(configProperties.getDuration("otel.exporter.otlp.logs.timeout")) - .isEqualTo(Duration.ofSeconds(15)); - assertThat(configProperties.getString("otel.exporter.otlp.logs.certificate")) - .isEqualTo(certificatePath); - assertThat(configProperties.getString("otel.exporter.otlp.logs.client.key")) - .isEqualTo(clientKeyPath); - assertThat(configProperties.getString("otel.exporter.otlp.logs.client.certificate")) - .isEqualTo(clientCertificatePath); + ArgumentCaptor configCaptor = + ArgumentCaptor.forClass(DeclarativeConfigProperties.class); + ComponentProvider componentProvider = getComponentProvider("otlp", LogRecordExporter.class); + verify(componentProvider).create(configCaptor.capture()); + DeclarativeConfigProperties configProperties = configCaptor.getValue(); + assertThat(configProperties.getString("protocol")).isEqualTo("http/protobuf"); + assertThat(configProperties.getString("endpoint")).isEqualTo("http://example:4318/v1/logs"); + List headers = configProperties.getStructuredList("headers"); + assertThat(headers) + .isNotNull() + .satisfiesExactly( + header -> { + assertThat(header.getString("name")).isEqualTo("key1"); + assertThat(header.getString("value")).isEqualTo("value1"); + }, + header -> { + assertThat(header.getString("name")).isEqualTo("key2"); + assertThat(header.getString("value")).isEqualTo("value2"); + }); + assertThat(configProperties.getString("compression")).isEqualTo("gzip"); + assertThat(configProperties.getInt("timeout")).isEqualTo(Duration.ofSeconds(15).toMillis()); + assertThat(configProperties.getString("certificate")).isEqualTo(certificatePath); + assertThat(configProperties.getString("client_key")).isEqualTo(clientKeyPath); + assertThat(configProperties.getString("client_certificate")).isEqualTo(clientCertificatePath); } @Test - void create_SpiExporter() { + void create_SpiExporter_Unknown() { List closeables = new ArrayList<>(); assertThatThrownBy( @@ -182,12 +210,31 @@ void create_SpiExporter() { LogRecordExporterFactory.getInstance() .create( new 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .LogRecordExporter() - .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + .LogRecordExporterModel() + .withAdditionalProperty( + "unknown_key", ImmutableMap.of("key1", "value1")), spiHelper, new ArrayList<>())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unrecognized log record exporter(s): [test]"); + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage( + "No component provider detected for io.opentelemetry.sdk.logs.export.LogRecordExporter with name \"unknown_key\"."); cleanup.addCloseables(closeables); } + + @Test + void create_SpiExporter_Valid() { + LogRecordExporter logRecordExporter = + LogRecordExporterFactory.getInstance() + .create( + new LogRecordExporterModel() + .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + spiHelper, + new ArrayList<>()); + assertThat(logRecordExporter) + .isInstanceOf(LogRecordExporterComponentProvider.TestLogRecordExporter.class); + assertThat( + ((LogRecordExporterComponentProvider.TestLogRecordExporter) logRecordExporter) + .config.getString("key1")) + .isEqualTo("value1"); + } } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordProcessorFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordProcessorFactoryTest.java index 043533117b2..00314510786 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordProcessorFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LogRecordProcessorFactoryTest.java @@ -9,20 +9,22 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.google.common.collect.ImmutableMap; -import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleLogRecordProcessor; +import io.opentelemetry.sdk.extension.incubator.fileconfig.component.LogRecordProcessorComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleLogRecordProcessorModel; import java.io.Closeable; import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import org.assertj.core.api.Assertions; import 
org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -33,34 +35,17 @@ class LogRecordProcessorFactoryTest { private final SpiHelper spiHelper = SpiHelper.create(LogRecordProcessorFactoryTest.class.getClassLoader()); - @Test - void create_Null() { - List closeables = new ArrayList<>(); - - io.opentelemetry.sdk.logs.LogRecordProcessor processor = - LogRecordProcessorFactory.getInstance().create(null, spiHelper, Collections.emptyList()); - cleanup.addCloseable(processor); - cleanup.addCloseables(closeables); - - assertThat(processor.toString()) - .isEqualTo(io.opentelemetry.sdk.logs.LogRecordProcessor.composite().toString()); - } - @Test void create_BatchNullExporter() { - List closeables = new ArrayList<>(); - - io.opentelemetry.sdk.logs.LogRecordProcessor processor = - LogRecordProcessorFactory.getInstance() - .create( - new LogRecordProcessor().withBatch(new BatchLogRecordProcessor()), - spiHelper, - Collections.emptyList()); - cleanup.addCloseable(processor); - cleanup.addCloseables(closeables); - - assertThat(processor.toString()) - .isEqualTo(io.opentelemetry.sdk.logs.LogRecordProcessor.composite().toString()); + assertThatThrownBy( + () -> + LogRecordProcessorFactory.getInstance() + .create( + new LogRecordProcessorModel().withBatch(new BatchLogRecordProcessorModel()), + spiHelper, + Collections.emptyList())) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage("batch log record processor exporter is required but is null"); } @Test @@ -68,17 +53,17 @@ void create_BatchDefaults() { List closeables = new ArrayList<>(); io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor expectedProcessor = io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor.builder( - OtlpGrpcLogRecordExporter.getDefault()) + OtlpHttpLogRecordExporter.getDefault()) .build(); cleanup.addCloseable(expectedProcessor); io.opentelemetry.sdk.logs.LogRecordProcessor processor = LogRecordProcessorFactory.getInstance() .create( - new LogRecordProcessor() + new LogRecordProcessorModel() .withBatch( - new BatchLogRecordProcessor() - .withExporter(new LogRecordExporter().withOtlp(new Otlp()))), + new BatchLogRecordProcessorModel() + .withExporter(new LogRecordExporterModel().withOtlp(new OtlpModel()))), spiHelper, closeables); cleanup.addCloseable(processor); @@ -92,7 +77,7 @@ void create_BatchConfigured() { List closeables = new ArrayList<>(); io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor expectedProcessor = io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor.builder( - OtlpGrpcLogRecordExporter.getDefault()) + OtlpHttpLogRecordExporter.getDefault()) .setScheduleDelay(Duration.ofMillis(1)) .setMaxExportBatchSize(2) .setExporterTimeout(Duration.ofMillis(3)) @@ -102,10 +87,10 @@ void create_BatchConfigured() { io.opentelemetry.sdk.logs.LogRecordProcessor processor = LogRecordProcessorFactory.getInstance() .create( - new LogRecordProcessor() + new LogRecordProcessorModel() .withBatch( - new BatchLogRecordProcessor() - .withExporter(new LogRecordExporter().withOtlp(new Otlp())) + new BatchLogRecordProcessorModel() + .withExporter(new LogRecordExporterModel().withOtlp(new OtlpModel())) .withScheduleDelay(1) .withMaxExportBatchSize(2) .withExportTimeout(3)), @@ -119,19 +104,16 @@ void create_BatchConfigured() { @Test void create_SimpleNullExporter() { - List closeables = new ArrayList<>(); - - io.opentelemetry.sdk.logs.LogRecordProcessor processor = - LogRecordProcessorFactory.getInstance() - .create( - new LogRecordProcessor().withSimple(new 
SimpleLogRecordProcessor()), - spiHelper, - Collections.emptyList()); - cleanup.addCloseable(processor); - cleanup.addCloseables(closeables); - - assertThat(processor.toString()) - .isEqualTo(io.opentelemetry.sdk.logs.LogRecordProcessor.composite().toString()); + assertThatThrownBy( + () -> + LogRecordProcessorFactory.getInstance() + .create( + new LogRecordProcessorModel() + .withSimple(new SimpleLogRecordProcessorModel()), + spiHelper, + Collections.emptyList())) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage("simple log record processor exporter is required but is null"); } @Test @@ -139,16 +121,16 @@ void create_SimpleConfigured() { List closeables = new ArrayList<>(); io.opentelemetry.sdk.logs.LogRecordProcessor expectedProcessor = io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor.create( - OtlpGrpcLogRecordExporter.getDefault()); + OtlpHttpLogRecordExporter.getDefault()); cleanup.addCloseable(expectedProcessor); io.opentelemetry.sdk.logs.LogRecordProcessor processor = LogRecordProcessorFactory.getInstance() .create( - new LogRecordProcessor() + new LogRecordProcessorModel() .withSimple( - new SimpleLogRecordProcessor() - .withExporter(new LogRecordExporter().withOtlp(new Otlp()))), + new SimpleLogRecordProcessorModel() + .withExporter(new LogRecordExporterModel().withOtlp(new OtlpModel()))), spiHelper, closeables); cleanup.addCloseable(processor); @@ -158,19 +140,35 @@ void create_SimpleConfigured() { } @Test - void create_SpiProcessor() { - List closeables = new ArrayList<>(); - + void create_SpiProcessor_Unknown() { assertThatThrownBy( () -> LogRecordProcessorFactory.getInstance() .create( - new LogRecordProcessor() - .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + new LogRecordProcessorModel() + .withAdditionalProperty( + "unknown_key", ImmutableMap.of("key1", "value1")), spiHelper, - closeables)) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unrecognized log record processor(s): [test]"); - cleanup.addCloseables(closeables); + new ArrayList<>())) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage( + "No component provider detected for io.opentelemetry.sdk.logs.LogRecordProcessor with name \"unknown_key\"."); + } + + @Test + void create_SpiExporter_Valid() { + io.opentelemetry.sdk.logs.LogRecordProcessor logRecordProcessor = + LogRecordProcessorFactory.getInstance() + .create( + new LogRecordProcessorModel() + .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + spiHelper, + new ArrayList<>()); + assertThat(logRecordProcessor) + .isInstanceOf(LogRecordProcessorComponentProvider.TestLogRecordProcessor.class); + Assertions.assertThat( + ((LogRecordProcessorComponentProvider.TestLogRecordProcessor) logRecordProcessor) + .config.getString("key1")) + .isEqualTo("value1"); } } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderFactoryTest.java index 4ab9403b425..78aceb89896 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/LoggerProviderFactoryTest.java @@ -7,16 +7,16 @@ import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; -import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; 
+import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProviderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; import io.opentelemetry.sdk.logs.LogLimits; import io.opentelemetry.sdk.logs.SdkLoggerProvider; import java.io.Closeable; @@ -52,28 +52,29 @@ void create(LoggerProviderAndAttributeLimits model, SdkLoggerProvider expectedPr private static Stream createArguments() { return Stream.of( - Arguments.of(null, SdkLoggerProvider.builder().build()), Arguments.of( LoggerProviderAndAttributeLimits.create(null, null), SdkLoggerProvider.builder().build()), Arguments.of( - LoggerProviderAndAttributeLimits.create(new AttributeLimits(), new LoggerProvider()), + LoggerProviderAndAttributeLimits.create( + new AttributeLimitsModel(), new LoggerProviderModel()), SdkLoggerProvider.builder().build()), Arguments.of( LoggerProviderAndAttributeLimits.create( - new AttributeLimits(), - new LoggerProvider() + new AttributeLimitsModel(), + new LoggerProviderModel() .withLimits( - new LogRecordLimits() + new LogRecordLimitsModel() .withAttributeCountLimit(1) .withAttributeValueLengthLimit(2)) .withProcessors( Collections.singletonList( - new LogRecordProcessor() + new LogRecordProcessorModel() .withBatch( - new BatchLogRecordProcessor() + new BatchLogRecordProcessorModel() .withExporter( - new LogRecordExporter().withOtlp(new Otlp())))))), + new LogRecordExporterModel() + .withOtlp(new OtlpModel())))))), SdkLoggerProvider.builder() .setLogLimits( () -> @@ -83,7 +84,7 @@ private static Stream createArguments() { .build()) .addLogRecordProcessor( io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor.builder( - OtlpGrpcLogRecordExporter.getDefault()) + OtlpHttpLogRecordExporter.getDefault()) .build()) .build())); } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MeterProviderFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MeterProviderFactoryTest.java index bdcc24626a7..29f7996f519 100644 --- 
a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MeterProviderFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MeterProviderFactoryTest.java @@ -7,16 +7,16 @@ import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; -import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; +import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MeterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetric; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Selector; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Stream; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MeterProviderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetricModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PushMetricExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SelectorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.StreamModel; import io.opentelemetry.sdk.metrics.InstrumentSelector; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.View; @@ -34,20 +34,6 @@ class MeterProviderFactoryTest { private final SpiHelper spiHelper = SpiHelper.create(MeterProviderFactoryTest.class.getClassLoader()); - @Test - void create_Null() { - List closeables = new ArrayList<>(); - SdkMeterProvider expectedProvider = SdkMeterProvider.builder().build(); - cleanup.addCloseable(expectedProvider); - - SdkMeterProvider provider = - MeterProviderFactory.getInstance().create(null, spiHelper, closeables).build(); - cleanup.addCloseable(provider); - cleanup.addCloseables(closeables); - - assertThat(provider.toString()).isEqualTo(expectedProvider.toString()); - } - @Test void create_Defaults() { List closeables = new ArrayList<>(); @@ -56,7 +42,7 @@ void create_Defaults() { SdkMeterProvider provider = MeterProviderFactory.getInstance() - .create(new MeterProvider(), spiHelper, closeables) + .create(new MeterProviderModel(), spiHelper, closeables) .build(); cleanup.addCloseable(provider); cleanup.addCloseables(closeables); @@ -71,7 +57,7 @@ void create_Configured() { SdkMeterProvider.builder() .registerMetricReader( io.opentelemetry.sdk.metrics.export.PeriodicMetricReader.builder( - OtlpGrpcMetricExporter.getDefault()) + OtlpHttpMetricExporter.getDefault()) .build()) .registerView( InstrumentSelector.builder().setName("instrument-name").build(), @@ -82,21 +68,25 @@ void create_Configured() { SdkMeterProvider provider = MeterProviderFactory.getInstance() .create( - new MeterProvider() + new MeterProviderModel() .withReaders( 
Collections.singletonList( - new MetricReader() + new MetricReaderModel() .withPeriodic( - new PeriodicMetricReader() + new PeriodicMetricReaderModel() .withExporter( - new MetricExporter().withOtlp(new OtlpMetric()))))) + new PushMetricExporterModel() + .withOtlp(new OtlpMetricModel()))))) .withViews( Collections.singletonList( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .View() - .withSelector(new Selector().withInstrumentName("instrument-name")) + .ViewModel() + .withSelector( + new SelectorModel().withInstrumentName("instrument-name")) .withStream( - new Stream().withName("stream-name").withAttributeKeys(null)))), + new StreamModel() + .withName("stream-name") + .withAttributeKeys(null)))), spiHelper, closeables) .build(); diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricExporterFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricExporterFactoryTest.java index 4b9b7520441..51b066b2d97 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricExporterFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricExporterFactoryTest.java @@ -8,26 +8,27 @@ import static io.opentelemetry.sdk.extension.incubator.fileconfig.FileConfigTestUtil.createTempFileWithContent; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableMap; import com.linecorp.armeria.testing.junit5.server.SelfSignedCertificateExtension; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; import io.opentelemetry.exporter.logging.LoggingMetricExporter; import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter; -import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Console; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Headers; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetric; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetric.DefaultHistogramAggregation; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Prometheus; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.component.MetricExporterComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ConsoleModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.NameStringValuePairModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetricModel; +import 
io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.metrics.export.MetricExporter; import java.io.Closeable; import java.io.IOException; @@ -35,12 +36,17 @@ import java.security.cert.CertificateEncodingException; import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.stream.Collectors; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.api.io.TempDir; import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; @@ -57,27 +63,46 @@ class MetricExporterFactoryTest { @RegisterExtension CleanupExtension cleanup = new CleanupExtension(); - private SpiHelper spiHelper = SpiHelper.create(MetricExporterFactoryTest.class.getClassLoader()); + private final SpiHelper spiHelper = + spy(SpiHelper.create(SpanExporterFactoryTest.class.getClassLoader())); + private List> loadedComponentProviders = Collections.emptyList(); - @Test - void create_Null() { - assertThat(MetricExporterFactory.getInstance().create(null, spiHelper, new ArrayList<>())) - .isNull(); + @BeforeEach + @SuppressWarnings("unchecked") + void setup() { + when(spiHelper.load(ComponentProvider.class)) + .thenAnswer( + invocation -> { + List> result = + (List>) invocation.callRealMethod(); + loadedComponentProviders = + result.stream().map(Mockito::spy).collect(Collectors.toList()); + return loadedComponentProviders; + }); + } + + private ComponentProvider getComponentProvider(String name, Class type) { + return loadedComponentProviders.stream() + .filter( + componentProvider -> + componentProvider.getName().equals(name) + && componentProvider.getType().equals(type)) + .findFirst() + .orElseThrow(IllegalStateException::new); } @Test void create_OtlpDefaults() { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); - OtlpGrpcMetricExporter expectedExporter = OtlpGrpcMetricExporter.getDefault(); + OtlpHttpMetricExporter expectedExporter = OtlpHttpMetricExporter.getDefault(); cleanup.addCloseable(expectedExporter); MetricExporter exporter = MetricExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .MetricExporter() - .withOtlp(new OtlpMetric()), + .PushMetricExporterModel() + .withOtlp(new OtlpMetricModel()), spiHelper, closeables); cleanup.addCloseable(exporter); @@ -85,31 +110,26 @@ void create_OtlpDefaults() { assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableMetricExporterProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.protocol")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.endpoint")).isNull(); - assertThat(configProperties.getMap("otel.exporter.otlp.metrics.headers")).isEmpty(); - 
assertThat(configProperties.getString("otel.exporter.otlp.metrics.compression")).isNull(); - assertThat(configProperties.getDuration("otel.exporter.otlp.metrics.timeout")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.certificate")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.client.key")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.client.certificate")) - .isNull(); - assertThat( - configProperties.getString("otel.exporter.otlp.metrics.default.histogram.aggregation")) - .isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.temporality.preference")) - .isNull(); + ArgumentCaptor configCaptor = + ArgumentCaptor.forClass(DeclarativeConfigProperties.class); + ComponentProvider componentProvider = getComponentProvider("otlp", MetricExporter.class); + verify(componentProvider).create(configCaptor.capture()); + DeclarativeConfigProperties configProperties = configCaptor.getValue(); + assertThat(configProperties.getString("protocol")).isNull(); + assertThat(configProperties.getString("endpoint")).isNull(); + assertThat(configProperties.getStructured("headers")).isNull(); + assertThat(configProperties.getString("compression")).isNull(); + assertThat(configProperties.getInt("timeout")).isNull(); + assertThat(configProperties.getString("certificate")).isNull(); + assertThat(configProperties.getString("client_key")).isNull(); + assertThat(configProperties.getString("client_certificate")).isNull(); + assertThat(configProperties.getString("temporality_preference")).isNull(); + assertThat(configProperties.getString("default_histogram_aggregation")).isNull(); } @Test void create_OtlpConfigured(@TempDir Path tempDir) throws CertificateEncodingException, IOException { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); OtlpHttpMetricExporter expectedExporter = OtlpHttpMetricExporter.builder() @@ -118,6 +138,10 @@ void create_OtlpConfigured(@TempDir Path tempDir) .addHeader("key2", "value2") .setTimeout(Duration.ofSeconds(15)) .setCompression("gzip") + .setAggregationTemporalitySelector(AggregationTemporalitySelector.deltaPreferred()) + .setDefaultAggregationSelector( + DefaultAggregationSelector.getDefault() + .with(InstrumentType.HISTOGRAM, Aggregation.base2ExponentialBucketHistogram())) .build(); cleanup.addCloseable(expectedExporter); @@ -135,15 +159,19 @@ void create_OtlpConfigured(@TempDir Path tempDir) MetricExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .MetricExporter() + .PushMetricExporterModel() .withOtlp( - new OtlpMetric() + new OtlpMetricModel() .withProtocol("http/protobuf") - .withEndpoint("http://example:4318") + .withEndpoint("http://example:4318/v1/metrics") .withHeaders( - new Headers() - .withAdditionalProperty("key1", "value1") - .withAdditionalProperty("key2", "value2")) + Arrays.asList( + new NameStringValuePairModel() + .withName("key1") + .withValue("value1"), + new NameStringValuePairModel() + .withName("key2") + .withValue("value2"))) .withCompression("gzip") .withTimeout(15_000) .withCertificate(certificatePath) @@ -151,7 +179,8 @@ void create_OtlpConfigured(@TempDir Path tempDir) .withClientCertificate(clientCertificatePath) .withTemporalityPreference("delta") .withDefaultHistogramAggregation( - DefaultHistogramAggregation.BASE_2_EXPONENTIAL_BUCKET_HISTOGRAM)), + OtlpMetricModel.DefaultHistogramAggregation + .BASE_2_EXPONENTIAL_BUCKET_HISTOGRAM)), spiHelper, closeables); 
cleanup.addCloseable(exporter); @@ -159,37 +188,37 @@ void create_OtlpConfigured(@TempDir Path tempDir) assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableMetricExporterProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.protocol")) - .isEqualTo("http/protobuf"); - assertThat(configProperties.getString("otel.exporter.otlp.endpoint")) - .isEqualTo("http://example:4318"); - assertThat(configProperties.getMap("otel.exporter.otlp.metrics.headers")) - .isEqualTo(ImmutableMap.of("key1", "value1", "key2", "value2")); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.compression")) - .isEqualTo("gzip"); - assertThat(configProperties.getDuration("otel.exporter.otlp.metrics.timeout")) - .isEqualTo(Duration.ofSeconds(15)); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.certificate")) - .isEqualTo(certificatePath); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.client.key")) - .isEqualTo(clientKeyPath); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.client.certificate")) - .isEqualTo(clientCertificatePath); - assertThat(configProperties.getString("otel.exporter.otlp.metrics.temporality.preference")) - .isEqualTo("delta"); - assertThat( - configProperties.getString("otel.exporter.otlp.metrics.default.histogram.aggregation")) + ArgumentCaptor configCaptor = + ArgumentCaptor.forClass(DeclarativeConfigProperties.class); + ComponentProvider componentProvider = getComponentProvider("otlp", MetricExporter.class); + verify(componentProvider).create(configCaptor.capture()); + DeclarativeConfigProperties configProperties = configCaptor.getValue(); + assertThat(configProperties.getString("protocol")).isEqualTo("http/protobuf"); + assertThat(configProperties.getString("endpoint")).isEqualTo("http://example:4318/v1/metrics"); + List headers = configProperties.getStructuredList("headers"); + assertThat(headers) + .isNotNull() + .satisfiesExactly( + header -> { + assertThat(header.getString("name")).isEqualTo("key1"); + assertThat(header.getString("value")).isEqualTo("value1"); + }, + header -> { + assertThat(header.getString("name")).isEqualTo("key2"); + assertThat(header.getString("value")).isEqualTo("value2"); + }); + assertThat(configProperties.getString("compression")).isEqualTo("gzip"); + assertThat(configProperties.getInt("timeout")).isEqualTo(Duration.ofSeconds(15).toMillis()); + assertThat(configProperties.getString("certificate")).isEqualTo(certificatePath); + assertThat(configProperties.getString("client_key")).isEqualTo(clientKeyPath); + assertThat(configProperties.getString("client_certificate")).isEqualTo(clientCertificatePath); + assertThat(configProperties.getString("temporality_preference")).isEqualTo("delta"); + assertThat(configProperties.getString("default_histogram_aggregation")) .isEqualTo("base2_exponential_bucket_histogram"); } @Test void create_Console() { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); LoggingMetricExporter expectedExporter = LoggingMetricExporter.create(); cleanup.addCloseable(expectedExporter); @@ -198,8 +227,8 @@ void create_Console() { MetricExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .MetricExporter() - 
.withConsole(new Console()), + .PushMetricExporterModel() + .withConsole(new ConsoleModel()), spiHelper, closeables); cleanup.addCloseable(exporter); @@ -209,38 +238,37 @@ void create_Console() { } @Test - void create_PrometheusExporter() { - List closeables = new ArrayList<>(); - + void create_SpiExporter_Unknown() { assertThatThrownBy( () -> MetricExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .MetricExporter() - .withPrometheus(new Prometheus()), + .PushMetricExporterModel() + .withAdditionalProperty( + "unknown_key", ImmutableMap.of("key1", "value1")), spiHelper, new ArrayList<>())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("prometheus exporter not supported in this context"); - cleanup.addCloseables(closeables); + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage( + "No component provider detected for io.opentelemetry.sdk.metrics.export.MetricExporter with name \"unknown_key\"."); } @Test - void create_SpiExporter() { - List closeables = new ArrayList<>(); - - assertThatThrownBy( - () -> - MetricExporterFactory.getInstance() - .create( - new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .MetricExporter() - .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), - spiHelper, - new ArrayList<>())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unrecognized metric exporter(s): [test]"); - cleanup.addCloseables(closeables); + void create_SpiExporter_Valid() { + MetricExporter metricExporter = + MetricExporterFactory.getInstance() + .create( + new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model + .PushMetricExporterModel() + .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + spiHelper, + new ArrayList<>()); + assertThat(metricExporter) + .isInstanceOf(MetricExporterComponentProvider.TestMetricExporter.class); + assertThat( + ((MetricExporterComponentProvider.TestMetricExporter) metricExporter) + .config.getString("key1")) + .isEqualTo("value1"); } } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricReaderFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricReaderFactoryTest.java index ae1dfceedb4..287e8977866 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricReaderFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/MetricReaderFactoryTest.java @@ -6,27 +6,24 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; -import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import io.github.netmikey.logunit.api.LogCapturer; -import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter; import io.opentelemetry.exporter.prometheus.PrometheusHttpServer; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import 
io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.internal.ConfigurableMetricReaderProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetric; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Prometheus; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricReader; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetricModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PrometheusModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PushMetricExporterModel; import java.io.Closeable; import java.io.IOException; import java.net.ServerSocket; @@ -36,7 +33,6 @@ import java.util.List; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; -import org.mockito.ArgumentCaptor; class MetricReaderFactoryTest { @@ -44,27 +40,21 @@ class MetricReaderFactoryTest { @RegisterExtension LogCapturer logCapturer = - LogCapturer.create().captureForLogger(ConfigurationFactory.class.getName()); + LogCapturer.create().captureForLogger(DeclarativeConfiguration.class.getName()); private SpiHelper spiHelper = SpiHelper.create(MetricReaderFactoryTest.class.getClassLoader()); - @Test - void create_Null() { - assertThat(MetricReaderFactory.getInstance().create(null, spiHelper, Collections.emptyList())) - .isNull(); - } - @Test void create_PeriodicNullExporter() { assertThatThrownBy( () -> MetricReaderFactory.getInstance() .create( - new MetricReader().withPeriodic(new PeriodicMetricReader()), + new MetricReaderModel().withPeriodic(new PeriodicMetricReaderModel()), spiHelper, Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("exporter required for periodic reader"); + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage("periodic metric reader exporter is required but is null"); } @Test @@ -72,17 +62,18 @@ void create_PeriodicDefaults() { List closeables = new ArrayList<>(); io.opentelemetry.sdk.metrics.export.PeriodicMetricReader expectedReader = io.opentelemetry.sdk.metrics.export.PeriodicMetricReader.builder( - OtlpGrpcMetricExporter.getDefault()) + OtlpHttpMetricExporter.getDefault()) .build(); cleanup.addCloseable(expectedReader); io.opentelemetry.sdk.metrics.export.MetricReader reader = MetricReaderFactory.getInstance() .create( - new MetricReader() + new MetricReaderModel() .withPeriodic( - new PeriodicMetricReader() - .withExporter(new MetricExporter().withOtlp(new OtlpMetric()))), + new PeriodicMetricReaderModel() + .withExporter( + new PushMetricExporterModel().withOtlp(new OtlpMetricModel()))), 
spiHelper, closeables); cleanup.addCloseable(reader); @@ -96,7 +87,7 @@ void create_PeriodicConfigured() { List closeables = new ArrayList<>(); io.opentelemetry.sdk.metrics.export.MetricReader expectedReader = io.opentelemetry.sdk.metrics.export.PeriodicMetricReader.builder( - OtlpGrpcMetricExporter.getDefault()) + OtlpHttpMetricExporter.getDefault()) .setInterval(Duration.ofMillis(1)) .build(); cleanup.addCloseable(expectedReader); @@ -104,10 +95,11 @@ void create_PeriodicConfigured() { io.opentelemetry.sdk.metrics.export.MetricReader reader = MetricReaderFactory.getInstance() .create( - new MetricReader() + new MetricReaderModel() .withPeriodic( - new PeriodicMetricReader() - .withExporter(new MetricExporter().withOtlp(new OtlpMetric())) + new PeriodicMetricReaderModel() + .withExporter( + new PushMetricExporterModel().withOtlp(new OtlpMetricModel())) .withInterval(1)), spiHelper, closeables); @@ -129,26 +121,20 @@ void create_PullPrometheusDefault() throws IOException { io.opentelemetry.sdk.metrics.export.MetricReader reader = MetricReaderFactory.getInstance() .create( - new MetricReader() + new MetricReaderModel() .withPull( - new PullMetricReader() + new PullMetricReaderModel() .withExporter( - new MetricExporter() - .withPrometheus(new Prometheus().withPort(port)))), + new PullMetricExporterModel() + .withPrometheus(new PrometheusModel().withPort(port)))), spiHelper, closeables); cleanup.addCloseable(reader); cleanup.addCloseables(closeables); assertThat(reader.toString()).isEqualTo(expectedReader.toString()); - - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableMetricReaderProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.prometheus.host")).isNull(); - assertThat(configProperties.getInt("otel.exporter.prometheus.port")).isEqualTo(port); + // TODO(jack-berg): validate prometheus component provider was invoked with correct arguments + verify(spiHelper).load(ComponentProvider.class); } @Test @@ -165,27 +151,23 @@ void create_PullPrometheusConfigured() throws IOException { io.opentelemetry.sdk.metrics.export.MetricReader reader = MetricReaderFactory.getInstance() .create( - new MetricReader() + new MetricReaderModel() .withPull( - new PullMetricReader() + new PullMetricReaderModel() .withExporter( - new MetricExporter() + new PullMetricExporterModel() .withPrometheus( - new Prometheus().withHost("localhost").withPort(port)))), + new PrometheusModel() + .withHost("localhost") + .withPort(port)))), spiHelper, closeables); cleanup.addCloseable(reader); cleanup.addCloseables(closeables); assertThat(reader.toString()).isEqualTo(expectedReader.toString()); - - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableMetricReaderProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.prometheus.host")).isEqualTo("localhost"); - assertThat(configProperties.getInt("otel.exporter.prometheus.port")).isEqualTo(port); + // TODO(jack-berg): validate prometheus component provider was invoked with correct arguments + verify(spiHelper).load(ComponentProvider.class); } @Test @@ -194,34 +176,23 @@ void create_InvalidPullReader() { () -> MetricReaderFactory.getInstance() .create( - new 
MetricReader().withPull(new PullMetricReader()), + new MetricReaderModel().withPull(new PullMetricReaderModel()), spiHelper, Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("exporter required for pull reader"); - - assertThatThrownBy( - () -> - MetricReaderFactory.getInstance() - .create( - new MetricReader() - .withPull(new PullMetricReader().withExporter(new MetricExporter())), - spiHelper, - Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("prometheus is the only currently supported pull reader"); + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage("pull metric reader exporter is required but is null"); assertThatThrownBy( () -> MetricReaderFactory.getInstance() .create( - new MetricReader() + new MetricReaderModel() .withPull( - new PullMetricReader() - .withExporter(new MetricExporter().withOtlp(new OtlpMetric()))), + new PullMetricReaderModel() + .withExporter(new PullMetricExporterModel())), spiHelper, Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) + .isInstanceOf(DeclarativeConfigException.class) .hasMessage("prometheus is the only currently supported pull reader"); } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/OpenTelemetryConfigurationFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/OpenTelemetryConfigurationFactoryTest.java index 3bede9ea2e5..3d102c13dfa 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/OpenTelemetryConfigurationFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/OpenTelemetryConfigurationFactoryTest.java @@ -10,42 +10,43 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.propagation.ContextPropagators; import io.opentelemetry.context.propagation.TextMapPropagator; -import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; -import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; -import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; +import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter; +import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter; +import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter; import io.opentelemetry.extension.trace.propagation.B3Propagator; import io.opentelemetry.extension.trace.propagation.JaegerPropagator; import io.opentelemetry.extension.trace.propagation.OtTracePropagator; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOn; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Attributes; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessor; -import 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MeterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfiguration; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetric; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReader; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Resource; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Sampler; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Selector; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleLogRecordProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Stream; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOnModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeNameValueModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProviderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MeterProviderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetricModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReaderModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PropagatorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PushMetricExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ResourceModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SamplerModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SelectorModel; +import 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleLogRecordProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.StreamModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProviderModel; import io.opentelemetry.sdk.logs.LogLimits; import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.metrics.InstrumentSelector; @@ -68,34 +69,21 @@ class OpenTelemetryConfigurationFactoryTest { private final SpiHelper spiHelper = SpiHelper.create(OpenTelemetryConfigurationFactoryTest.class.getClassLoader()); - @Test - void create_Null() { - List closeables = new ArrayList<>(); - OpenTelemetrySdk expectedSdk = OpenTelemetrySdk.builder().build(); - cleanup.addCloseable(expectedSdk); - - OpenTelemetrySdk sdk = - OpenTelemetryConfigurationFactory.getInstance().create(null, spiHelper, closeables); - cleanup.addCloseable(sdk); - cleanup.addCloseables(closeables); - - assertThat(sdk.toString()).isEqualTo(expectedSdk.toString()); - } - @Test void create_InvalidFileFormat() { - List testCases = + List testCases = Arrays.asList( - new OpenTelemetryConfiguration(), new OpenTelemetryConfiguration().withFileFormat("1")); + new OpenTelemetryConfigurationModel(), + new OpenTelemetryConfigurationModel().withFileFormat("1")); List closeables = new ArrayList<>(); - for (OpenTelemetryConfiguration testCase : testCases) { + for (OpenTelemetryConfigurationModel testCase : testCases) { assertThatThrownBy( () -> OpenTelemetryConfigurationFactory.getInstance() .create(testCase, spiHelper, closeables)) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unsupported file format. Supported formats include: 0.1"); + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage("Unsupported file format. 
Supported formats include: 0.3"); cleanup.addCloseables(closeables); } } @@ -103,19 +91,13 @@ void create_InvalidFileFormat() { @Test void create_Defaults() { List closeables = new ArrayList<>(); - OpenTelemetrySdk expectedSdk = - OpenTelemetrySdk.builder() - .setPropagators( - ContextPropagators.create( - TextMapPropagator.composite( - W3CTraceContextPropagator.getInstance(), - W3CBaggagePropagator.getInstance()))) - .build(); + OpenTelemetrySdk expectedSdk = OpenTelemetrySdk.builder().build(); cleanup.addCloseable(expectedSdk); OpenTelemetrySdk sdk = OpenTelemetryConfigurationFactory.getInstance() - .create(new OpenTelemetryConfiguration().withFileFormat("0.1"), spiHelper, closeables); + .create( + new OpenTelemetryConfigurationModel().withFileFormat("0.3"), spiHelper, closeables); cleanup.addCloseable(sdk); cleanup.addCloseables(closeables); @@ -131,20 +113,20 @@ void create_Disabled() { OpenTelemetrySdk sdk = OpenTelemetryConfigurationFactory.getInstance() .create( - new OpenTelemetryConfiguration() - .withFileFormat("0.1") + new OpenTelemetryConfigurationModel() + .withFileFormat("0.3") .withDisabled(true) // Logger provider configuration should be ignored since SDK is disabled .withLoggerProvider( - new LoggerProvider() + new LoggerProviderModel() .withProcessors( Collections.singletonList( - new LogRecordProcessor() + new LogRecordProcessorModel() .withSimple( - new SimpleLogRecordProcessor() + new SimpleLogRecordProcessorModel() .withExporter( - new LogRecordExporter() - .withOtlp(new Otlp())))))), + new LogRecordExporterModel() + .withOtlp(new OtlpModel())))))), spiHelper, closeables); cleanup.addCloseable(sdk); @@ -160,6 +142,10 @@ void create_Configured() { io.opentelemetry.sdk.resources.Resource.getDefault().toBuilder() .put("service.name", "my-service") .put("key", "val") + // resource attributes from resource ComponentProviders + .put("color", "red") + .put("shape", "square") + .put("order", "second") .build(); OpenTelemetrySdk expectedSdk = OpenTelemetrySdk.builder() @@ -183,7 +169,7 @@ void create_Configured() { .build()) .addLogRecordProcessor( io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor.builder( - OtlpGrpcLogRecordExporter.getDefault()) + OtlpHttpLogRecordExporter.getDefault()) .build()) .build()) .setTracerProvider( @@ -201,7 +187,7 @@ void create_Configured() { .setSampler(alwaysOn()) .addSpanProcessor( io.opentelemetry.sdk.trace.export.BatchSpanProcessor.builder( - OtlpGrpcSpanExporter.getDefault()) + OtlpHttpSpanExporter.getDefault()) .build()) .build()) .setMeterProvider( @@ -209,7 +195,7 @@ void create_Configured() { .setResource(expectedResource) .registerMetricReader( io.opentelemetry.sdk.metrics.export.PeriodicMetricReader.builder( - OtlpGrpcMetricExporter.getDefault()) + OtlpHttpMetricExporter.getDefault()) .build()) .registerView( InstrumentSelector.builder().setName("instrument-name").build(), @@ -221,68 +207,81 @@ void create_Configured() { OpenTelemetrySdk sdk = OpenTelemetryConfigurationFactory.getInstance() .create( - new OpenTelemetryConfiguration() - .withFileFormat("0.1") - .withPropagators( - Arrays.asList( - "tracecontext", "baggage", "ottrace", "b3multi", "b3", "jaeger")) + new OpenTelemetryConfigurationModel() + .withFileFormat("0.3") + .withPropagator( + new PropagatorModel() + .withComposite( + Arrays.asList( + "tracecontext", + "baggage", + "ottrace", + "b3multi", + "b3", + "jaeger"))) .withResource( - new Resource() + new ResourceModel() .withAttributes( - new Attributes() - .withServiceName("my-service") - 
.withAdditionalProperty("key", "val"))) + Arrays.asList( + new AttributeNameValueModel() + .withName("service.name") + .withValue("my-service"), + new AttributeNameValueModel() + .withName("key") + .withValue("val")))) .withLoggerProvider( - new LoggerProvider() + new LoggerProviderModel() .withLimits( - new LogRecordLimits() + new LogRecordLimitsModel() .withAttributeValueLengthLimit(1) .withAttributeCountLimit(2)) .withProcessors( Collections.singletonList( - new LogRecordProcessor() + new LogRecordProcessorModel() .withBatch( - new BatchLogRecordProcessor() + new BatchLogRecordProcessorModel() .withExporter( - new LogRecordExporter() - .withOtlp(new Otlp())))))) + new LogRecordExporterModel() + .withOtlp(new OtlpModel())))))) .withTracerProvider( - new TracerProvider() + new TracerProviderModel() .withLimits( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal - .model.SpanLimits() + .model.SpanLimitsModel() .withAttributeCountLimit(1) .withAttributeValueLengthLimit(2) .withEventCountLimit(3) .withLinkCountLimit(4) .withEventAttributeCountLimit(5) .withLinkAttributeCountLimit(6)) - .withSampler(new Sampler().withAlwaysOn(new AlwaysOn())) + .withSampler(new SamplerModel().withAlwaysOn(new AlwaysOnModel())) .withProcessors( Collections.singletonList( - new SpanProcessor() + new SpanProcessorModel() .withBatch( - new BatchSpanProcessor() + new BatchSpanProcessorModel() .withExporter( - new SpanExporter().withOtlp(new Otlp())))))) + new SpanExporterModel() + .withOtlp(new OtlpModel())))))) .withMeterProvider( - new MeterProvider() + new MeterProviderModel() .withReaders( Collections.singletonList( - new MetricReader() + new MetricReaderModel() .withPeriodic( - new PeriodicMetricReader() + new PeriodicMetricReaderModel() .withExporter( - new MetricExporter() - .withOtlp(new OtlpMetric()))))) + new PushMetricExporterModel() + .withOtlp(new OtlpMetricModel()))))) .withViews( Collections.singletonList( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal - .model.View() + .model.ViewModel() .withSelector( - new Selector().withInstrumentName("instrument-name")) + new SelectorModel() + .withInstrumentName("instrument-name")) .withStream( - new Stream() + new StreamModel() .withName("stream-name") .withAttributeKeys(null))))), spiHelper, diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorFactoryTest.java new file mode 100644 index 00000000000..80377f417d0 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorFactoryTest.java @@ -0,0 +1,55 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; + +import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.extension.trace.propagation.B3Propagator; +import io.opentelemetry.extension.trace.propagation.JaegerPropagator; +import io.opentelemetry.extension.trace.propagation.OtTracePropagator; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import 
io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PropagatorModel; +import java.util.Arrays; +import java.util.Collections; +import java.util.stream.Stream; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class PropagatorFactoryTest { + + private final SpiHelper spiHelper = + SpiHelper.create(PropagatorFactoryTest.class.getClassLoader()); + + @ParameterizedTest + @MethodSource("createArguments") + void create(PropagatorModel model, ContextPropagators expectedPropagators) { + ContextPropagators propagators = + PropagatorFactory.getInstance().create(model, spiHelper, Collections.emptyList()); + + assertThat(propagators.toString()).isEqualTo(expectedPropagators.toString()); + } + + private static Stream createArguments() { + return Stream.of( + Arguments.of( + new PropagatorModel() + .withComposite( + Arrays.asList("tracecontext", "baggage", "ottrace", "b3multi", "b3", "jaeger")), + ContextPropagators.create( + TextMapPropagator.composite( + W3CTraceContextPropagator.getInstance(), + W3CBaggagePropagator.getInstance(), + OtTracePropagator.getInstance(), + B3Propagator.injectingMultiHeaders(), + B3Propagator.injectingSingleHeader(), + JaegerPropagator.getInstance())))); + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorsFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorsFactoryTest.java deleted file mode 100644 index 293d40717ab..00000000000 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/PropagatorsFactoryTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.extension.incubator.fileconfig; - -import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; - -import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; -import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; -import io.opentelemetry.context.propagation.ContextPropagators; -import io.opentelemetry.context.propagation.TextMapPropagator; -import io.opentelemetry.extension.trace.propagation.B3Propagator; -import io.opentelemetry.extension.trace.propagation.JaegerPropagator; -import io.opentelemetry.extension.trace.propagation.OtTracePropagator; -import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.stream.Stream; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -class PropagatorsFactoryTest { - - private final SpiHelper spiHelper = - SpiHelper.create(PropagatorsFactoryTest.class.getClassLoader()); - - @ParameterizedTest - @MethodSource("createArguments") - void create(List model, ContextPropagators expectedPropagators) { - ContextPropagators propagators = - PropagatorsFactory.getInstance().create(model, spiHelper, Collections.emptyList()); - - assertThat(propagators.toString()).isEqualTo(expectedPropagators.toString()); - } - - private static Stream createArguments() { - 
return Stream.of( - Arguments.of( - null, - ContextPropagators.create( - TextMapPropagator.composite( - W3CTraceContextPropagator.getInstance(), W3CBaggagePropagator.getInstance()))), - Arguments.of( - Collections.emptyList(), - ContextPropagators.create( - TextMapPropagator.composite( - W3CTraceContextPropagator.getInstance(), W3CBaggagePropagator.getInstance()))), - Arguments.of(Collections.singletonList("none"), ContextPropagators.noop()), - Arguments.of( - Arrays.asList("tracecontext", "baggage", "ottrace", "b3multi", "b3", "jaeger"), - ContextPropagators.create( - TextMapPropagator.composite( - W3CTraceContextPropagator.getInstance(), - W3CBaggagePropagator.getInstance(), - OtTracePropagator.getInstance(), - B3Propagator.injectingMultiHeaders(), - B3Propagator.injectingSingleHeader(), - JaegerPropagator.getInstance())))); - } - - @Test - void create_NoneAndOther() { - assertThatThrownBy( - () -> - PropagatorsFactory.getInstance() - .create(Arrays.asList("none", "foo"), spiHelper, Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("propagators contains \"none\" along with other propagators"); - } - - @Test - void create_UnknownSpiPropagator() { - assertThatThrownBy( - () -> - PropagatorsFactory.getInstance() - .create(Collections.singletonList("foo"), spiHelper, Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unrecognized value for otel.propagators: foo"); - } -} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ResourceFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ResourceFactoryTest.java index f1329ffcc77..7c0b954faf9 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ResourceFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ResourceFactoryTest.java @@ -6,41 +6,138 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; -import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Attributes; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeNameValueModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.DetectorAttributesModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.DetectorsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ResourceModel; import io.opentelemetry.sdk.resources.Resource; +import java.util.Arrays; import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; +import javax.annotation.Nullable; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; class ResourceFactoryTest { - @Test - void create_Null() { - assertThat( - ResourceFactory.getInstance() - .create(null, mock(SpiHelper.class), Collections.emptyList())) - .isEqualTo(Resource.getDefault()); - } + private SpiHelper spiHelper = SpiHelper.create(ResourceFactoryTest.class.getClassLoader()); @Test void create() { + spiHelper = spy(spiHelper); assertThat( 
ResourceFactory.getInstance() .create( - new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .Resource() + new ResourceModel() .withAttributes( - new Attributes() - .withServiceName("my-service") - .withAdditionalProperty("key", "val")), - mock(SpiHelper.class), + Arrays.asList( + new AttributeNameValueModel() + .withName("service.name") + .withValue("my-service"), + new AttributeNameValueModel().withName("key").withValue("val"), + new AttributeNameValueModel() + .withName("shape") + .withValue("circle"))), + spiHelper, Collections.emptyList())) .isEqualTo( Resource.getDefault().toBuilder() .put("service.name", "my-service") .put("key", "val") + .put("shape", "circle") + // From ResourceComponentProvider + .put("color", "red") + // From ResourceOrderedSecondComponentProvider, which takes priority over + // ResourceOrderedFirstComponentProvider + .put("order", "second") .build()); } + + @ParameterizedTest + @MethodSource("createWithDetectorsArgs") + void createWithDetectors( + @Nullable List included, @Nullable List excluded, Resource expectedResource) { + ResourceModel resourceModel = + new ResourceModel() + .withDetectors( + new DetectorsModel() + .withAttributes( + new DetectorAttributesModel() + .withIncluded(included) + .withExcluded(excluded))); + Resource resource = + ResourceFactory.getInstance().create(resourceModel, spiHelper, Collections.emptyList()); + assertThat(resource).isEqualTo(expectedResource); + } + + private static Stream createWithDetectorsArgs() { + return Stream.of( + Arguments.of( + null, + null, + Resource.getDefault().toBuilder() + .put("color", "red") + .put("shape", "square") + .put("order", "second") + .build()), + Arguments.of( + Collections.singletonList("color"), + null, + Resource.getDefault().toBuilder().put("color", "red").build()), + Arguments.of( + Arrays.asList("color", "shape"), + null, + Resource.getDefault().toBuilder().put("color", "red").put("shape", "square").build()), + Arguments.of( + null, + Collections.singletonList("color"), + Resource.getDefault().toBuilder() + .put("shape", "square") + .put("order", "second") + .build()), + Arguments.of( + null, + Arrays.asList("color", "shape"), + Resource.getDefault().toBuilder().put("order", "second").build()), + Arguments.of( + Collections.singletonList("color"), + Collections.singletonList("color"), + Resource.getDefault().toBuilder().build()), + Arguments.of( + Arrays.asList("color", "shape"), + Collections.singletonList("color"), + Resource.getDefault().toBuilder().put("shape", "square").build()), + Arguments.of( + Collections.singletonList("c*"), + null, + Resource.getDefault().toBuilder().put("color", "red").build()), + Arguments.of( + Collections.singletonList("c?lor"), + null, + Resource.getDefault().toBuilder().put("color", "red").build()), + Arguments.of( + null, + Collections.singletonList("c*"), + Resource.getDefault().toBuilder() + .put("shape", "square") + .put("order", "second") + .build()), + Arguments.of( + null, + Collections.singletonList("c?lor"), + Resource.getDefault().toBuilder() + .put("shape", "square") + .put("order", "second") + .build()), + Arguments.of( + Collections.singletonList("*o*"), + Collections.singletonList("order"), + Resource.getDefault().toBuilder().put("color", "red").build())); + } } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SamplerFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SamplerFactoryTest.java index 
c5c45eb1ae6..f57073d5246 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SamplerFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SamplerFactoryTest.java @@ -9,16 +9,17 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.google.common.collect.ImmutableMap; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOff; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOn; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.JaegerRemote; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ParentBased; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Sampler; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TraceIdRatioBased; +import io.opentelemetry.sdk.extension.incubator.fileconfig.component.SamplerComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOffModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOnModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.JaegerRemoteModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ParentBasedModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SamplerModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TraceIdRatioBasedModel; import io.opentelemetry.sdk.extension.trace.jaeger.sampler.JaegerRemoteSampler; import java.io.Closeable; import java.time.Duration; @@ -44,7 +45,7 @@ class SamplerFactoryTest { @ParameterizedTest @MethodSource("createArguments") void create( - @Nullable Sampler model, io.opentelemetry.sdk.trace.samplers.Sampler expectedSampler) { + @Nullable SamplerModel model, io.opentelemetry.sdk.trace.samplers.Sampler expectedSampler) { // Some samplers like JaegerRemoteSampler are Closeable - ensure these get cleaned up if (expectedSampler instanceof Closeable) { cleanup.addCloseable((Closeable) expectedSampler); @@ -61,44 +62,45 @@ void create( private static Stream createArguments() { return Stream.of( Arguments.of( - null, - io.opentelemetry.sdk.trace.samplers.Sampler.parentBased( - io.opentelemetry.sdk.trace.samplers.Sampler.alwaysOn())), - Arguments.of( - new Sampler().withAlwaysOn(new AlwaysOn()), + new SamplerModel().withAlwaysOn(new AlwaysOnModel()), io.opentelemetry.sdk.trace.samplers.Sampler.alwaysOn()), Arguments.of( - new Sampler().withAlwaysOff(new AlwaysOff()), + new SamplerModel().withAlwaysOff(new AlwaysOffModel()), io.opentelemetry.sdk.trace.samplers.Sampler.alwaysOff()), Arguments.of( - new Sampler().withTraceIdRatioBased(new TraceIdRatioBased()), + new SamplerModel().withTraceIdRatioBased(new TraceIdRatioBasedModel()), io.opentelemetry.sdk.trace.samplers.Sampler.traceIdRatioBased(1.0d)), Arguments.of( - new Sampler().withTraceIdRatioBased(new TraceIdRatioBased().withRatio(0.5d)), + new SamplerModel().withTraceIdRatioBased(new TraceIdRatioBasedModel().withRatio(0.5d)), 
io.opentelemetry.sdk.trace.samplers.Sampler.traceIdRatioBased(0.5)), Arguments.of( - new Sampler().withParentBased(new ParentBased()), + new SamplerModel().withParentBased(new ParentBasedModel()), io.opentelemetry.sdk.trace.samplers.Sampler.parentBased( io.opentelemetry.sdk.trace.samplers.Sampler.alwaysOn())), Arguments.of( - new Sampler() + new SamplerModel() .withParentBased( - new ParentBased() + new ParentBasedModel() .withRoot( - new Sampler() - .withTraceIdRatioBased(new TraceIdRatioBased().withRatio(0.1d))) + new SamplerModel() + .withTraceIdRatioBased( + new TraceIdRatioBasedModel().withRatio(0.1d))) .withRemoteParentSampled( - new Sampler() - .withTraceIdRatioBased(new TraceIdRatioBased().withRatio(0.2d))) + new SamplerModel() + .withTraceIdRatioBased( + new TraceIdRatioBasedModel().withRatio(0.2d))) .withRemoteParentNotSampled( - new Sampler() - .withTraceIdRatioBased(new TraceIdRatioBased().withRatio(0.3d))) + new SamplerModel() + .withTraceIdRatioBased( + new TraceIdRatioBasedModel().withRatio(0.3d))) .withLocalParentSampled( - new Sampler() - .withTraceIdRatioBased(new TraceIdRatioBased().withRatio(0.4d))) + new SamplerModel() + .withTraceIdRatioBased( + new TraceIdRatioBasedModel().withRatio(0.4d))) .withLocalParentNotSampled( - new Sampler() - .withTraceIdRatioBased(new TraceIdRatioBased().withRatio(0.5d)))), + new SamplerModel() + .withTraceIdRatioBased( + new TraceIdRatioBasedModel().withRatio(0.5d)))), io.opentelemetry.sdk.trace.samplers.Sampler.parentBasedBuilder( io.opentelemetry.sdk.trace.samplers.Sampler.traceIdRatioBased(0.1d)) .setRemoteParentSampled( @@ -111,32 +113,50 @@ private static Stream createArguments() { io.opentelemetry.sdk.trace.samplers.Sampler.traceIdRatioBased(0.5d)) .build()), Arguments.of( - new Sampler() + new SamplerModel() .withJaegerRemote( - new JaegerRemote() + new JaegerRemoteModel() .withEndpoint("http://jaeger-remote-endpoint") .withInterval(10_000) - .withInitialSampler(new Sampler().withAlwaysOff(new AlwaysOff()))), + .withInitialSampler( + new SamplerModel().withAlwaysOff(new AlwaysOffModel()))), JaegerRemoteSampler.builder() .setEndpoint("http://jaeger-remote-endpoint") .setPollingInterval(Duration.ofSeconds(10)) + .setInitialSampler(io.opentelemetry.sdk.trace.samplers.Sampler.alwaysOff()) .build())); } @Test - void create_SpiExporter() { + void create_SpiExporter_Unknown() { List closeables = new ArrayList<>(); assertThatThrownBy( () -> SamplerFactory.getInstance() .create( - new Sampler() - .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + new SamplerModel() + .withAdditionalProperty( + "unknown_key", ImmutableMap.of("key1", "value1")), spiHelper, new ArrayList<>())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unrecognized sampler(s): [test]"); + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage( + "No component provider detected for io.opentelemetry.sdk.trace.samplers.Sampler with name \"unknown_key\"."); cleanup.addCloseables(closeables); } + + @Test + void create_SpiExporter_Valid() { + io.opentelemetry.sdk.trace.samplers.Sampler sampler = + SamplerFactory.getInstance() + .create( + new SamplerModel() + .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + spiHelper, + new ArrayList<>()); + assertThat(sampler).isInstanceOf(SamplerComponentProvider.TestSampler.class); + assertThat(((SamplerComponentProvider.TestSampler) sampler).config.getString("key1")) + .isEqualTo("value1"); + } } diff --git 
a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanExporterFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanExporterFactoryTest.java index 09c47433003..22089676920 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanExporterFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanExporterFactoryTest.java @@ -8,26 +8,25 @@ import static io.opentelemetry.sdk.extension.incubator.fileconfig.FileConfigTestUtil.createTempFileWithContent; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableMap; import com.linecorp.armeria.testing.junit5.server.SelfSignedCertificateExtension; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; import io.opentelemetry.exporter.logging.LoggingSpanExporter; import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter; -import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.exporter.zipkin.ZipkinSpanExporter; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Console; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Headers; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Zipkin; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.component.SpanExporterComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ConsoleModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.NameStringValuePairModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ZipkinModel; import io.opentelemetry.sdk.trace.export.SpanExporter; import java.io.Closeable; import java.io.IOException; @@ -35,12 +34,17 @@ import java.security.cert.CertificateEncodingException; import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.stream.Collectors; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.api.io.TempDir; import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; @@ 
-57,21 +61,46 @@ class SpanExporterFactoryTest { @RegisterExtension CleanupExtension cleanup = new CleanupExtension(); - private SpiHelper spiHelper = SpiHelper.create(SpanExporterFactoryTest.class.getClassLoader()); + private final SpiHelper spiHelper = + spy(SpiHelper.create(SpanExporterFactoryTest.class.getClassLoader())); + private List> loadedComponentProviders = Collections.emptyList(); + + @BeforeEach + @SuppressWarnings("unchecked") + void setup() { + when(spiHelper.load(ComponentProvider.class)) + .thenAnswer( + invocation -> { + List> result = + (List>) invocation.callRealMethod(); + loadedComponentProviders = + result.stream().map(Mockito::spy).collect(Collectors.toList()); + return loadedComponentProviders; + }); + } + + private ComponentProvider getComponentProvider(String name, Class type) { + return loadedComponentProviders.stream() + .filter( + componentProvider -> + componentProvider.getName().equals(name) + && componentProvider.getType().equals(type)) + .findFirst() + .orElseThrow(IllegalStateException::new); + } @Test void create_OtlpDefaults() { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); - OtlpGrpcSpanExporter expectedExporter = OtlpGrpcSpanExporter.getDefault(); + OtlpHttpSpanExporter expectedExporter = OtlpHttpSpanExporter.getDefault(); cleanup.addCloseable(expectedExporter); SpanExporter exporter = SpanExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .SpanExporter() - .withOtlp(new Otlp()), + .SpanExporterModel() + .withOtlp(new OtlpModel()), spiHelper, closeables); cleanup.addCloseable(exporter); @@ -79,25 +108,24 @@ void create_OtlpDefaults() { assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableSpanExporterProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.otlp.traces.protocol")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.endpoint")).isNull(); - assertThat(configProperties.getMap("otel.exporter.otlp.traces.headers")).isEmpty(); - assertThat(configProperties.getString("otel.exporter.otlp.traces.compression")).isNull(); - assertThat(configProperties.getDuration("otel.exporter.otlp.traces.timeout")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.traces.certificate")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.traces.client.key")).isNull(); - assertThat(configProperties.getString("otel.exporter.otlp.traces.client.certificate")).isNull(); + ArgumentCaptor configCaptor = + ArgumentCaptor.forClass(DeclarativeConfigProperties.class); + ComponentProvider componentProvider = getComponentProvider("otlp", SpanExporter.class); + verify(componentProvider).create(configCaptor.capture()); + DeclarativeConfigProperties configProperties = configCaptor.getValue(); + assertThat(configProperties.getString("protocol")).isNull(); + assertThat(configProperties.getString("endpoint")).isNull(); + assertThat(configProperties.getStructured("headers")).isNull(); + assertThat(configProperties.getString("compression")).isNull(); + assertThat(configProperties.getInt("timeout")).isNull(); + assertThat(configProperties.getString("certificate")).isNull(); + assertThat(configProperties.getString("client_key")).isNull(); + 
assertThat(configProperties.getString("client_certificate")).isNull(); } @Test void create_OtlpConfigured(@TempDir Path tempDir) throws CertificateEncodingException, IOException { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); OtlpHttpSpanExporter expectedExporter = OtlpHttpSpanExporter.builder() @@ -123,15 +151,19 @@ void create_OtlpConfigured(@TempDir Path tempDir) SpanExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .SpanExporter() + .SpanExporterModel() .withOtlp( - new Otlp() + new OtlpModel() .withProtocol("http/protobuf") - .withEndpoint("http://example:4318") + .withEndpoint("http://example:4318/v1/traces") .withHeaders( - new Headers() - .withAdditionalProperty("key1", "value1") - .withAdditionalProperty("key2", "value2")) + Arrays.asList( + new NameStringValuePairModel() + .withName("key1") + .withValue("value1"), + new NameStringValuePairModel() + .withName("key2") + .withValue("value2"))) .withCompression("gzip") .withTimeout(15_000) .withCertificate(certificatePath) @@ -144,32 +176,34 @@ void create_OtlpConfigured(@TempDir Path tempDir) assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableSpanExporterProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.otlp.traces.protocol")) - .isEqualTo("http/protobuf"); - assertThat(configProperties.getString("otel.exporter.otlp.endpoint")) - .isEqualTo("http://example:4318"); - assertThat(configProperties.getMap("otel.exporter.otlp.traces.headers")) - .isEqualTo(ImmutableMap.of("key1", "value1", "key2", "value2")); - assertThat(configProperties.getString("otel.exporter.otlp.traces.compression")) - .isEqualTo("gzip"); - assertThat(configProperties.getDuration("otel.exporter.otlp.traces.timeout")) - .isEqualTo(Duration.ofSeconds(15)); - assertThat(configProperties.getString("otel.exporter.otlp.traces.certificate")) - .isEqualTo(certificatePath); - assertThat(configProperties.getString("otel.exporter.otlp.traces.client.key")) - .isEqualTo(clientKeyPath); - assertThat(configProperties.getString("otel.exporter.otlp.traces.client.certificate")) - .isEqualTo(clientCertificatePath); + ArgumentCaptor configCaptor = + ArgumentCaptor.forClass(DeclarativeConfigProperties.class); + ComponentProvider componentProvider = getComponentProvider("otlp", SpanExporter.class); + verify(componentProvider).create(configCaptor.capture()); + DeclarativeConfigProperties configProperties = configCaptor.getValue(); + assertThat(configProperties.getString("protocol")).isEqualTo("http/protobuf"); + assertThat(configProperties.getString("endpoint")).isEqualTo("http://example:4318/v1/traces"); + List headers = configProperties.getStructuredList("headers"); + assertThat(headers) + .isNotNull() + .satisfiesExactly( + header -> { + assertThat(header.getString("name")).isEqualTo("key1"); + assertThat(header.getString("value")).isEqualTo("value1"); + }, + header -> { + assertThat(header.getString("name")).isEqualTo("key2"); + assertThat(header.getString("value")).isEqualTo("value2"); + }); + assertThat(configProperties.getString("compression")).isEqualTo("gzip"); + assertThat(configProperties.getInt("timeout")).isEqualTo(Duration.ofSeconds(15).toMillis()); + 
assertThat(configProperties.getString("certificate")).isEqualTo(certificatePath); + assertThat(configProperties.getString("client_key")).isEqualTo(clientKeyPath); + assertThat(configProperties.getString("client_certificate")).isEqualTo(clientCertificatePath); } @Test void create_Console() { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); LoggingSpanExporter expectedExporter = LoggingSpanExporter.create(); cleanup.addCloseable(expectedExporter); @@ -178,8 +212,8 @@ void create_Console() { SpanExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .SpanExporter() - .withConsole(new Console()), + .SpanExporterModel() + .withConsole(new ConsoleModel()), spiHelper, closeables); cleanup.addCloseable(exporter); @@ -190,7 +224,6 @@ void create_Console() { @Test void create_ZipkinDefaults() { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); ZipkinSpanExporter expectedExporter = ZipkinSpanExporter.builder().build(); @@ -200,8 +233,8 @@ void create_ZipkinDefaults() { SpanExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .SpanExporter() - .withZipkin(new Zipkin()), + .SpanExporterModel() + .withZipkin(new ZipkinModel()), spiHelper, closeables); cleanup.addCloseable(exporter); @@ -209,18 +242,17 @@ void create_ZipkinDefaults() { assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableSpanExporterProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.zipkin.endpoint")).isNull(); - assertThat(configProperties.getDuration("otel.exporter.zipkin.timeout")).isNull(); + ArgumentCaptor configCaptor = + ArgumentCaptor.forClass(DeclarativeConfigProperties.class); + ComponentProvider componentProvider = getComponentProvider("zipkin", SpanExporter.class); + verify(componentProvider).create(configCaptor.capture()); + DeclarativeConfigProperties configProperties = configCaptor.getValue(); + assertThat(configProperties.getString("endpoint")).isNull(); + assertThat(configProperties.getLong("timeout")).isNull(); } @Test void create_ZipkinConfigured() { - spiHelper = spy(spiHelper); List closeables = new ArrayList<>(); ZipkinSpanExporter expectedExporter = ZipkinSpanExporter.builder() @@ -233,9 +265,9 @@ void create_ZipkinConfigured() { SpanExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .SpanExporter() + .SpanExporterModel() .withZipkin( - new Zipkin() + new ZipkinModel() .withEndpoint("http://zipkin:9411/v1/v2/spans") .withTimeout(15_000)), spiHelper, @@ -245,19 +277,17 @@ void create_ZipkinConfigured() { assertThat(exporter.toString()).isEqualTo(expectedExporter.toString()); - ArgumentCaptor configCaptor = ArgumentCaptor.forClass(ConfigProperties.class); - verify(spiHelper) - .loadConfigurable( - eq(ConfigurableSpanExporterProvider.class), any(), any(), configCaptor.capture()); - ConfigProperties configProperties = configCaptor.getValue(); - assertThat(configProperties.getString("otel.exporter.zipkin.endpoint")) - .isEqualTo("http://zipkin:9411/v1/v2/spans"); - assertThat(configProperties.getDuration("otel.exporter.zipkin.timeout")) - .isEqualTo(Duration.ofSeconds(15)); + ArgumentCaptor configCaptor = + 
ArgumentCaptor.forClass(DeclarativeConfigProperties.class); + ComponentProvider componentProvider = getComponentProvider("zipkin", SpanExporter.class); + verify(componentProvider).create(configCaptor.capture()); + DeclarativeConfigProperties configProperties = configCaptor.getValue(); + assertThat(configProperties.getString("endpoint")).isEqualTo("http://zipkin:9411/v1/v2/spans"); + assertThat(configProperties.getLong("timeout")).isEqualTo(15_000); } @Test - void create_SpiExporter() { + void create_SpiExporter_Unknown() { List closeables = new ArrayList<>(); assertThatThrownBy( @@ -265,12 +295,31 @@ void create_SpiExporter() { SpanExporterFactory.getInstance() .create( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .SpanExporter() - .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + .SpanExporterModel() + .withAdditionalProperty( + "unknown_key", ImmutableMap.of("key1", "value1")), spiHelper, new ArrayList<>())) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unrecognized span exporter(s): [test]"); + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage( + "No component provider detected for io.opentelemetry.sdk.trace.export.SpanExporter with name \"unknown_key\"."); cleanup.addCloseables(closeables); } + + @Test + void create_SpiExporter_Valid() { + SpanExporter spanExporter = + SpanExporterFactory.getInstance() + .create( + new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model + .SpanExporterModel() + .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + spiHelper, + new ArrayList<>()); + assertThat(spanExporter).isInstanceOf(SpanExporterComponentProvider.TestSpanExporter.class); + assertThat( + ((SpanExporterComponentProvider.TestSpanExporter) spanExporter) + .config.getString("key1")) + .isEqualTo("value1"); + } } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsFactoryTest.java index 1fc09f61f45..c6879ecc529 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanLimitsFactoryTest.java @@ -9,8 +9,8 @@ import static org.mockito.Mockito.mock; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanLimits; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanLimitsModel; import java.util.Collections; import java.util.stream.Stream; import org.junit.jupiter.params.ParameterizedTest; @@ -32,25 +32,28 @@ void create( private static Stream createArguments() { return Stream.of( - Arguments.of(null, io.opentelemetry.sdk.trace.SpanLimits.getDefault()), Arguments.of( SpanLimitsAndAttributeLimits.create(null, null), io.opentelemetry.sdk.trace.SpanLimits.getDefault()), Arguments.of( - SpanLimitsAndAttributeLimits.create(new AttributeLimits(), new SpanLimits()), + SpanLimitsAndAttributeLimits.create(new AttributeLimitsModel(), new SpanLimitsModel()), io.opentelemetry.sdk.trace.SpanLimits.getDefault()), Arguments.of( 
SpanLimitsAndAttributeLimits.create( - new AttributeLimits().withAttributeCountLimit(1).withAttributeValueLengthLimit(2), - new SpanLimits()), + new AttributeLimitsModel() + .withAttributeCountLimit(1) + .withAttributeValueLengthLimit(2), + new SpanLimitsModel()), io.opentelemetry.sdk.trace.SpanLimits.builder() .setMaxNumberOfAttributes(1) .setMaxAttributeValueLength(2) .build()), Arguments.of( SpanLimitsAndAttributeLimits.create( - new AttributeLimits().withAttributeCountLimit(1).withAttributeValueLengthLimit(2), - new SpanLimits() + new AttributeLimitsModel() + .withAttributeCountLimit(1) + .withAttributeValueLengthLimit(2), + new SpanLimitsModel() .withAttributeCountLimit(3) .withAttributeValueLengthLimit(4) .withEventCountLimit(5) diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanProcessorFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanProcessorFactoryTest.java index 3c6059cc714..a1b7bb32ba4 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanProcessorFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/SpanProcessorFactoryTest.java @@ -9,20 +9,22 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.google.common.collect.ImmutableMap; -import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleSpanProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessor; +import io.opentelemetry.sdk.extension.incubator.fileconfig.component.SpanProcessorComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleSpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessorModel; import java.io.Closeable; import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -33,34 +35,17 @@ class SpanProcessorFactoryTest { private final SpiHelper spiHelper = SpiHelper.create(SpanProcessorFactoryTest.class.getClassLoader()); - @Test - void create_Null() { - List closeables = new ArrayList<>(); - - io.opentelemetry.sdk.trace.SpanProcessor processor = - SpanProcessorFactory.getInstance().create(null, spiHelper, Collections.emptyList()); - cleanup.addCloseable(processor); - 
cleanup.addCloseables(closeables); - - assertThat(processor.toString()) - .isEqualTo(io.opentelemetry.sdk.trace.SpanProcessor.composite().toString()); - } - @Test void create_BatchNullExporter() { - List closeables = new ArrayList<>(); - - io.opentelemetry.sdk.trace.SpanProcessor processor = - SpanProcessorFactory.getInstance() - .create( - new SpanProcessor().withBatch(new BatchSpanProcessor()), - spiHelper, - Collections.emptyList()); - cleanup.addCloseable(processor); - cleanup.addCloseables(closeables); - - assertThat(processor.toString()) - .isEqualTo(io.opentelemetry.sdk.trace.SpanProcessor.composite().toString()); + assertThatThrownBy( + () -> + SpanProcessorFactory.getInstance() + .create( + new SpanProcessorModel().withBatch(new BatchSpanProcessorModel()), + spiHelper, + Collections.emptyList())) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage("batch span processor exporter is required but is null"); } @Test @@ -68,17 +53,17 @@ void create_BatchDefaults() { List closeables = new ArrayList<>(); io.opentelemetry.sdk.trace.export.BatchSpanProcessor expectedProcessor = io.opentelemetry.sdk.trace.export.BatchSpanProcessor.builder( - OtlpGrpcSpanExporter.getDefault()) + OtlpHttpSpanExporter.getDefault()) .build(); cleanup.addCloseable(expectedProcessor); io.opentelemetry.sdk.trace.SpanProcessor processor = SpanProcessorFactory.getInstance() .create( - new SpanProcessor() + new SpanProcessorModel() .withBatch( - new BatchSpanProcessor() - .withExporter(new SpanExporter().withOtlp(new Otlp()))), + new BatchSpanProcessorModel() + .withExporter(new SpanExporterModel().withOtlp(new OtlpModel()))), spiHelper, closeables); cleanup.addCloseable(processor); @@ -92,7 +77,7 @@ void create_BatchConfigured() { List closeables = new ArrayList<>(); io.opentelemetry.sdk.trace.export.BatchSpanProcessor expectedProcessor = io.opentelemetry.sdk.trace.export.BatchSpanProcessor.builder( - OtlpGrpcSpanExporter.getDefault()) + OtlpHttpSpanExporter.getDefault()) .setScheduleDelay(Duration.ofMillis(1)) .setMaxExportBatchSize(2) .setExporterTimeout(Duration.ofMillis(3)) @@ -102,10 +87,10 @@ void create_BatchConfigured() { io.opentelemetry.sdk.trace.SpanProcessor processor = SpanProcessorFactory.getInstance() .create( - new SpanProcessor() + new SpanProcessorModel() .withBatch( - new BatchSpanProcessor() - .withExporter(new SpanExporter().withOtlp(new Otlp())) + new BatchSpanProcessorModel() + .withExporter(new SpanExporterModel().withOtlp(new OtlpModel())) .withScheduleDelay(1) .withMaxExportBatchSize(2) .withExportTimeout(3)), @@ -119,19 +104,15 @@ void create_BatchConfigured() { @Test void create_SimpleNullExporter() { - List closeables = new ArrayList<>(); - - io.opentelemetry.sdk.trace.SpanProcessor processor = - SpanProcessorFactory.getInstance() - .create( - new SpanProcessor().withSimple(new SimpleSpanProcessor()), - spiHelper, - Collections.emptyList()); - cleanup.addCloseable(processor); - cleanup.addCloseables(closeables); - - assertThat(processor.toString()) - .isEqualTo(io.opentelemetry.sdk.trace.SpanProcessor.composite().toString()); + assertThatThrownBy( + () -> + SpanProcessorFactory.getInstance() + .create( + new SpanProcessorModel().withSimple(new SimpleSpanProcessorModel()), + spiHelper, + Collections.emptyList())) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage("simple span processor exporter is required but is null"); } @Test @@ -139,16 +120,16 @@ void create_SimpleConfigured() { List closeables = new ArrayList<>(); io.opentelemetry.sdk.trace.SpanProcessor 
expectedProcessor = io.opentelemetry.sdk.trace.export.SimpleSpanProcessor.create( - OtlpGrpcSpanExporter.getDefault()); + OtlpHttpSpanExporter.getDefault()); cleanup.addCloseable(expectedProcessor); io.opentelemetry.sdk.trace.SpanProcessor processor = SpanProcessorFactory.getInstance() .create( - new SpanProcessor() + new SpanProcessorModel() .withSimple( - new SimpleSpanProcessor() - .withExporter(new SpanExporter().withOtlp(new Otlp()))), + new SimpleSpanProcessorModel() + .withExporter(new SpanExporterModel().withOtlp(new OtlpModel()))), spiHelper, closeables); cleanup.addCloseable(processor); @@ -158,19 +139,34 @@ void create_SimpleConfigured() { } @Test - void create_SpiProcessor() { - List closeables = new ArrayList<>(); - + void create_SpiProcessor_Unknown() { assertThatThrownBy( () -> SpanProcessorFactory.getInstance() .create( - new SpanProcessor() - .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + new SpanProcessorModel() + .withAdditionalProperty( + "unknown_key", ImmutableMap.of("key1", "value1")), spiHelper, - closeables)) - .isInstanceOf(ConfigurationException.class) - .hasMessage("Unrecognized span processor(s): [test]"); - cleanup.addCloseables(closeables); + new ArrayList<>())) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage( + "No component provider detected for io.opentelemetry.sdk.trace.SpanProcessor with name \"unknown_key\"."); + } + + @Test + void create_SpiExporter_Valid() { + io.opentelemetry.sdk.trace.SpanProcessor spanProcessor = + SpanProcessorFactory.getInstance() + .create( + new SpanProcessorModel() + .withAdditionalProperty("test", ImmutableMap.of("key1", "value1")), + spiHelper, + new ArrayList<>()); + assertThat(spanProcessor).isInstanceOf(SpanProcessorComponentProvider.TestSpanProcessor.class); + Assertions.assertThat( + ((SpanProcessorComponentProvider.TestSpanProcessor) spanProcessor) + .config.getString("key1")) + .isEqualTo("value1"); } } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TestDeclarativeConfigurationCustomizerProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TestDeclarativeConfigurationCustomizerProvider.java new file mode 100644 index 00000000000..33187b73ba2 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TestDeclarativeConfigurationCustomizerProvider.java @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeNameValueModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ResourceModel; +import java.util.ArrayList; +import java.util.List; + +public class TestDeclarativeConfigurationCustomizerProvider + implements DeclarativeConfigurationCustomizerProvider { + @Override + public void customize(DeclarativeConfigurationCustomizer customizer) { + customizer.addModelCustomizer( + model -> { + ResourceModel resource = model.getResource(); + if (resource == null) { + resource = new ResourceModel(); + model.withResource(resource); + } + List attributes = resource.getAttributes(); + if (attributes == null) { + attributes = new ArrayList<>(); + resource.withAttributes(attributes); + } + attributes.add( + new AttributeNameValueModel() + .withName("foo") + .withType(AttributeNameValueModel.Type.STRING) + 
.withValue("bar")); + attributes.add( + new AttributeNameValueModel() + .withName("color") + .withType(AttributeNameValueModel.Type.STRING) + .withValue("blue")); + return model; + }); + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TextMapPropagatorFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TextMapPropagatorFactoryTest.java new file mode 100644 index 00000000000..17d21a7f364 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TextMapPropagatorFactoryTest.java @@ -0,0 +1,97 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; +import io.opentelemetry.api.incubator.config.DeclarativeConfigException; +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.extension.trace.propagation.B3Propagator; +import io.opentelemetry.extension.trace.propagation.JaegerPropagator; +import io.opentelemetry.extension.trace.propagation.OtTracePropagator; +import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; +import io.opentelemetry.sdk.extension.incubator.fileconfig.component.TextMapPropagatorComponentProvider; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class TextMapPropagatorFactoryTest { + + private final SpiHelper spiHelper = + SpiHelper.create(TextMapPropagatorFactoryTest.class.getClassLoader()); + + @ParameterizedTest + @MethodSource("createArguments") + void create(List model, TextMapPropagator expectedPropagator) { + TextMapPropagator propagator = + TextMapPropagatorFactory.getInstance().create(model, spiHelper, Collections.emptyList()); + + assertThat(propagator.toString()).isEqualTo(expectedPropagator.toString()); + } + + private static Stream createArguments() { + return Stream.of( + Arguments.of( + Collections.emptyList(), + TextMapPropagator.composite( + W3CTraceContextPropagator.getInstance(), W3CBaggagePropagator.getInstance())), + Arguments.of(Collections.singletonList("none"), TextMapPropagator.noop()), + Arguments.of( + Arrays.asList("tracecontext", "baggage", "ottrace", "b3multi", "b3", "jaeger"), + TextMapPropagator.composite( + W3CTraceContextPropagator.getInstance(), + W3CBaggagePropagator.getInstance(), + OtTracePropagator.getInstance(), + B3Propagator.injectingMultiHeaders(), + B3Propagator.injectingSingleHeader(), + JaegerPropagator.getInstance()))); + } + + @Test + void create_NoneAndOther() { + assertThatThrownBy( + () -> + TextMapPropagatorFactory.getInstance() + .create(Arrays.asList("none", "foo"), spiHelper, Collections.emptyList())) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage("propagators contains \"none\" along with other propagators"); + } + + @Test + void create_SpiPropagator_Unknown() { + assertThatThrownBy( + () -> + 
TextMapPropagatorFactory.getInstance() + .create(Collections.singletonList("foo"), spiHelper, Collections.emptyList())) + .isInstanceOf(DeclarativeConfigException.class) + .hasMessage( + "No component provider detected for io.opentelemetry.context.propagation.TextMapPropagator with name \"foo\"."); + } + + @Test + void create_SpiPropagator_Valid() { + TextMapPropagator textMapPropagator = + TextMapPropagatorFactory.getInstance() + .create(Collections.singletonList("test"), spiHelper, new ArrayList<>()); + assertThat(textMapPropagator) + .isInstanceOfSatisfying( + TextMapPropagatorComponentProvider.TestTextMapPropagator.class, + testTextMapPropagator -> + assertThat(testTextMapPropagator.config) + .isInstanceOfSatisfying( + YamlDeclarativeConfigProperties.class, + config -> assertThat(config.getPropertyKeys()).isEmpty())); + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderFactoryTest.java index b244acef40a..af6b3a20d07 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/TracerProviderFactoryTest.java @@ -8,17 +8,17 @@ import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.trace.samplers.Sampler.alwaysOn; -import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; +import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter; import io.opentelemetry.internal.testing.CleanupExtension; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOn; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Sampler; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporter; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessor; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOnModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimitsModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SamplerModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporterModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessorModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProviderModel; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.SpanLimits; import java.io.Closeable; @@ -54,33 +54,34 @@ void create(TracerProviderAndAttributeLimits model, SdkTracerProvider expectedPr private static Stream createArguments() { 
return Stream.of( - Arguments.of(null, SdkTracerProvider.builder().build()), Arguments.of( TracerProviderAndAttributeLimits.create(null, null), SdkTracerProvider.builder().build()), Arguments.of( - TracerProviderAndAttributeLimits.create(new AttributeLimits(), new TracerProvider()), + TracerProviderAndAttributeLimits.create( + new AttributeLimitsModel(), new TracerProviderModel()), SdkTracerProvider.builder().build()), Arguments.of( TracerProviderAndAttributeLimits.create( - new AttributeLimits(), - new TracerProvider() + new AttributeLimitsModel(), + new TracerProviderModel() .withLimits( new io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model - .SpanLimits() + .SpanLimitsModel() .withAttributeCountLimit(1) .withAttributeValueLengthLimit(2) .withEventCountLimit(3) .withLinkCountLimit(4) .withEventAttributeCountLimit(5) .withLinkAttributeCountLimit(6)) - .withSampler(new Sampler().withAlwaysOn(new AlwaysOn())) + .withSampler(new SamplerModel().withAlwaysOn(new AlwaysOnModel())) .withProcessors( Collections.singletonList( - new SpanProcessor() + new SpanProcessorModel() .withBatch( - new BatchSpanProcessor() - .withExporter(new SpanExporter().withOtlp(new Otlp())))))), + new BatchSpanProcessorModel() + .withExporter( + new SpanExporterModel().withOtlp(new OtlpModel())))))), SdkTracerProvider.builder() .setSpanLimits( SpanLimits.builder() @@ -94,7 +95,7 @@ private static Stream createArguments() { .setSampler(alwaysOn()) .addSpanProcessor( io.opentelemetry.sdk.trace.export.BatchSpanProcessor.builder( - OtlpGrpcSpanExporter.getDefault()) + OtlpHttpSpanExporter.getDefault()) .build()) .build())); } diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ViewFactoryTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ViewFactoryTest.java index e04d5460a2d..9196b354bf3 100644 --- a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ViewFactoryTest.java +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/ViewFactoryTest.java @@ -6,14 +6,13 @@ package io.opentelemetry.sdk.extension.incubator.fileconfig; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper; -import io.opentelemetry.sdk.autoconfigure.spi.ConfigurationException; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Aggregation; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExplicitBucketHistogram; -import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Stream; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AggregationModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExplicitBucketHistogramModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.IncludeExcludeModel; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.StreamModel; import io.opentelemetry.sdk.metrics.View; import java.util.Arrays; import java.util.Collections; @@ -22,16 +21,6 @@ class ViewFactoryTest { - @Test - void create_Null() { - assertThatThrownBy( - () -> - ViewFactory.getInstance() - .create(null, mock(SpiHelper.class), Collections.emptyList())) - .isInstanceOf(ConfigurationException.class) - 
.hasMessage("stream must not be null"); - } - @Test void create_Defaults() { View expectedView = View.builder().build(); @@ -39,7 +28,7 @@ void create_Defaults() { View view = ViewFactory.getInstance() .create( - new Stream().withAttributeKeys(null), + new StreamModel().withAttributeKeys(null), mock(SpiHelper.class), Collections.emptyList()); @@ -61,14 +50,15 @@ void create() { View view = ViewFactory.getInstance() .create( - new Stream() + new StreamModel() .withName("name") .withDescription("description") - .withAttributeKeys(Arrays.asList("foo", "bar")) + .withAttributeKeys( + new IncludeExcludeModel().withIncluded(Arrays.asList("foo", "bar"))) .withAggregation( - new Aggregation() + new AggregationModel() .withExplicitBucketHistogram( - new ExplicitBucketHistogram() + new ExplicitBucketHistogramModel() .withBoundaries(Arrays.asList(1.0, 2.0)))), mock(SpiHelper.class), Collections.emptyList()); diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/YamlDeclarativeConfigPropertiesTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/YamlDeclarativeConfigPropertiesTest.java new file mode 100644 index 00000000000..14f213245f4 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/YamlDeclarativeConfigPropertiesTest.java @@ -0,0 +1,251 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig; + +import static io.opentelemetry.api.incubator.config.DeclarativeConfigProperties.empty; +import static org.assertj.core.api.Assertions.assertThat; + +import com.google.common.collect.ImmutableSet; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfigurationModel; +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class YamlDeclarativeConfigPropertiesTest { + + private static final String extendedSchema = + "file_format: \"0.3\"\n" + + "disabled: false\n" + + "\n" + + "resource:\n" + + " attributes:\n" + + " - name: service.name\n" + + " value: \"unknown_service\"\n" + + "\n" + + "other:\n" + + " str_key: str_value\n" + + " int_key: 1\n" + + " float_key: 1.1\n" + + " bool_key: true\n" + + " null_key:\n" + + " str_list_key: [val1, val2]\n" + + " int_list_key: [1, 2]\n" + + " float_list_key: [1.1, 2.2]\n" + + " bool_list_key: [true, false]\n" + + " mixed_list_key: [val1, 1, 1.1, true]\n" + + " map_key:\n" + + " str_key1: str_value1\n" + + " int_key1: 2\n" + + " map_key1:\n" + + " str_key2: str_value2\n" + + " int_key2: 3\n" + + " list_key:\n" + + " - str_key1: str_value1\n" + + " int_key1: 2\n" + + " map_key1:\n" + + " str_key2: str_value2\n" + + " int_key2: 3\n" + + " - str_key1: str_value1\n" + + " int_key1: 2"; + + private DeclarativeConfigProperties structuredConfigProps; + + @BeforeEach + void setup() { + OpenTelemetryConfigurationModel configuration = + DeclarativeConfiguration.parse( + new ByteArrayInputStream(extendedSchema.getBytes(StandardCharsets.UTF_8))); + structuredConfigProps = DeclarativeConfiguration.toConfigProperties(configuration); + } + + @Test + void configurationSchema() { + // Validate can read declarative configuration schema 
properties + assertThat(structuredConfigProps.getString("file_format")).isEqualTo("0.3"); + DeclarativeConfigProperties resourceProps = structuredConfigProps.getStructured("resource"); + assertThat(resourceProps).isNotNull(); + List resourceAttributesList = + resourceProps.getStructuredList("attributes"); + assertThat(resourceAttributesList) + .isNotNull() + .satisfiesExactly( + attributeEntry -> { + assertThat(attributeEntry.getString("name")).isEqualTo("service.name"); + assertThat(attributeEntry.getString("value")).isEqualTo("unknown_service"); + }); + } + + @Test + void additionalProperties() { + assertThat(structuredConfigProps.getPropertyKeys()) + .isEqualTo(ImmutableSet.of("file_format", "disabled", "resource", "other")); + + // Validate can read properties not part of configuration schema + // .other + DeclarativeConfigProperties otherProps = structuredConfigProps.getStructured("other"); + assertThat(otherProps).isNotNull(); + assertThat(otherProps.getPropertyKeys()) + .isEqualTo( + ImmutableSet.of( + "str_key", + "int_key", + "float_key", + "bool_key", + "null_key", + "str_list_key", + "int_list_key", + "float_list_key", + "bool_list_key", + "mixed_list_key", + "map_key", + "list_key")); + assertThat(otherProps.getString("str_key")).isEqualTo("str_value"); + assertThat(otherProps.getInt("int_key")).isEqualTo(1); + assertThat(otherProps.getLong("int_key")).isEqualTo(1); + assertThat(otherProps.getDouble("float_key")).isEqualTo(1.1); + assertThat(otherProps.getString("null_key")).isNull(); + assertThat(otherProps.getInt("null_key")).isNull(); + assertThat(otherProps.getLong("null_key")).isNull(); + assertThat(otherProps.getBoolean("null_key")).isNull(); + assertThat(otherProps.getScalarList("str_list_key", String.class)) + .isEqualTo(Arrays.asList("val1", "val2")); + assertThat(otherProps.getScalarList("int_list_key", Long.class)) + .isEqualTo(Arrays.asList(1L, 2L)); + assertThat(otherProps.getScalarList("float_list_key", Double.class)) + .isEqualTo(Arrays.asList(1.1d, 2.2d)); + assertThat(otherProps.getScalarList("bool_list_key", Boolean.class)) + .isEqualTo(Arrays.asList(true, false)); + // If reading a scalar list which is mixed, entries which are not aligned with the requested + // type are filtered out + assertThat(otherProps.getScalarList("mixed_list_key", String.class)) + .isEqualTo(Collections.singletonList("val1")); + assertThat(otherProps.getScalarList("mixed_list_key", Long.class)) + .isEqualTo(Collections.singletonList(1L)); + assertThat(otherProps.getScalarList("mixed_list_key", Double.class)) + .isEqualTo(Collections.singletonList(1.1d)); + assertThat(otherProps.getScalarList("mixed_list_key", Boolean.class)) + .isEqualTo(Collections.singletonList(true)); + + // .other.map_key + DeclarativeConfigProperties otherMapKeyProps = otherProps.getStructured("map_key"); + assertThat(otherMapKeyProps).isNotNull(); + assertThat(otherMapKeyProps.getPropertyKeys()) + .isEqualTo(ImmutableSet.of("str_key1", "int_key1", "map_key1")); + assertThat(otherMapKeyProps.getString("str_key1")).isEqualTo("str_value1"); + assertThat(otherMapKeyProps.getInt("int_key1")).isEqualTo(2); + // other.map_key.map_key1 + DeclarativeConfigProperties otherMapKeyMapKey1Props = + otherMapKeyProps.getStructured("map_key1"); + assertThat(otherMapKeyMapKey1Props).isNotNull(); + assertThat(otherMapKeyMapKey1Props.getPropertyKeys()) + .isEqualTo(ImmutableSet.of("str_key2", "int_key2")); + assertThat(otherMapKeyMapKey1Props.getString("str_key2")).isEqualTo("str_value2"); + 
assertThat(otherMapKeyMapKey1Props.getInt("int_key2")).isEqualTo(3); + + // .other.list_key + List listKey = otherProps.getStructuredList("list_key"); + assertThat(listKey).hasSize(2); + DeclarativeConfigProperties listKeyProps1 = listKey.get(0); + assertThat(listKeyProps1.getPropertyKeys()) + .isEqualTo(ImmutableSet.of("str_key1", "int_key1", "map_key1")); + assertThat(listKeyProps1.getString("str_key1")).isEqualTo("str_value1"); + assertThat(listKeyProps1.getInt("int_key1")).isEqualTo(2); + // .other.list_key[0] + DeclarativeConfigProperties listKeyProps1MapKeyProps = listKeyProps1.getStructured("map_key1"); + assertThat(listKeyProps1MapKeyProps).isNotNull(); + assertThat(listKeyProps1MapKeyProps.getPropertyKeys()) + .isEqualTo(ImmutableSet.of("str_key2", "int_key2")); + assertThat(listKeyProps1MapKeyProps.getString("str_key2")).isEqualTo("str_value2"); + assertThat(listKeyProps1MapKeyProps.getInt("int_key2")).isEqualTo(3); + // .other.list_key[1] + DeclarativeConfigProperties listKeyProps2 = listKey.get(1); + assertThat(listKeyProps2.getPropertyKeys()).isEqualTo(ImmutableSet.of("str_key1", "int_key1")); + assertThat(listKeyProps2.getString("str_key1")).isEqualTo("str_value1"); + assertThat(listKeyProps2.getInt("int_key1")).isEqualTo(2); + } + + @Test + void treeWalking() { + // Validate common pattern of walking down tree path which is not defined + // Access string at .foo.bar.baz without null checking and without exception. + assertThat( + structuredConfigProps + .getStructured("foo", empty()) + .getStructured("bar", empty()) + .getString("baz")) + .isNull(); + } + + @Test + void defaults() { + assertThat(structuredConfigProps.getString("foo", "bar")).isEqualTo("bar"); + assertThat(structuredConfigProps.getInt("foo", 1)).isEqualTo(1); + assertThat(structuredConfigProps.getLong("foo", 1)).isEqualTo(1); + assertThat(structuredConfigProps.getDouble("foo", 1.1)).isEqualTo(1.1); + assertThat(structuredConfigProps.getBoolean("foo", true)).isTrue(); + assertThat( + structuredConfigProps.getScalarList( + "foo", String.class, Collections.singletonList("bar"))) + .isEqualTo(Collections.singletonList("bar")); + assertThat(structuredConfigProps.getStructured("foo", empty())).isEqualTo(empty()); + assertThat(structuredConfigProps.getStructuredList("foo", Collections.emptyList())) + .isEqualTo(Collections.emptyList()); + } + + @Test + void missingKeys() { + assertThat(structuredConfigProps.getString("foo")).isNull(); + assertThat(structuredConfigProps.getInt("foo")).isNull(); + assertThat(structuredConfigProps.getLong("foo")).isNull(); + assertThat(structuredConfigProps.getDouble("foo")).isNull(); + assertThat(structuredConfigProps.getBoolean("foo")).isNull(); + assertThat(structuredConfigProps.getScalarList("foo", String.class)).isNull(); + assertThat(structuredConfigProps.getStructured("foo")).isNull(); + assertThat(structuredConfigProps.getStructuredList("foo")).isNull(); + } + + @Test + void wrongType() { + DeclarativeConfigProperties otherProps = structuredConfigProps.getStructured("other"); + assertThat(otherProps).isNotNull(); + + assertThat(otherProps.getString("int_key")).isNull(); + assertThat(otherProps.getInt("str_key")).isNull(); + assertThat(otherProps.getLong("str_key")).isNull(); + assertThat(otherProps.getDouble("str_key")).isNull(); + assertThat(otherProps.getBoolean("str_key")).isNull(); + assertThat(otherProps.getScalarList("str_key", String.class)).isNull(); + assertThat(otherProps.getStructured("str_key")).isNull(); + 
assertThat(otherProps.getStructuredList("str_key")).isNull(); + } + + @Test + void emptyProperties() { + assertThat(empty().getString("foo")).isNull(); + assertThat(empty().getInt("foo")).isNull(); + assertThat(empty().getLong("foo")).isNull(); + assertThat(empty().getDouble("foo")).isNull(); + assertThat(empty().getBoolean("foo")).isNull(); + assertThat(empty().getScalarList("foo", String.class)).isNull(); + assertThat(empty().getStructured("foo")).isNull(); + assertThat(empty().getStructuredList("foo")).isNull(); + assertThat(empty().getString("foo", "bar")).isEqualTo("bar"); + assertThat(empty().getInt("foo", 1)).isEqualTo(1); + assertThat(empty().getLong("foo", 1)).isEqualTo(1); + assertThat(empty().getDouble("foo", 1.1)).isEqualTo(1.1); + assertThat(empty().getBoolean("foo", true)).isTrue(); + assertThat(empty().getScalarList("foo", String.class, Collections.singletonList("bar"))) + .isEqualTo(Collections.singletonList("bar")); + assertThat(empty().getStructured("foo", empty())).isEqualTo(empty()); + assertThat(empty().getStructuredList("foo", Collections.emptyList())) + .isEqualTo(Collections.emptyList()); + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/LogRecordExporterComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/LogRecordExporterComponentProvider.java new file mode 100644 index 00000000000..f21c1d1bf09 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/LogRecordExporterComponentProvider.java @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.LogRecordExporter; +import java.util.Collection; + +public class LogRecordExporterComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return LogRecordExporter.class; + } + + @Override + public String getName() { + return "test"; + } + + @Override + public LogRecordExporter create(DeclarativeConfigProperties config) { + return new TestLogRecordExporter(config); + } + + public static class TestLogRecordExporter implements LogRecordExporter { + + public final DeclarativeConfigProperties config; + + private TestLogRecordExporter(DeclarativeConfigProperties config) { + this.config = config; + } + + @Override + public CompletableResultCode export(Collection logs) { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/LogRecordProcessorComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/LogRecordProcessorComponentProvider.java new file mode 100644 index 00000000000..a44cd939c74 --- /dev/null +++ 
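
The YamlDeclarativeConfigPropertiesTest above covers the read API a ComponentProvider sees at configuration time. The following is a minimal sketch of defensive access using only methods exercised in that test (scalar getters with defaults, getStructured with empty(), typed scalar lists); the key names ("endpoint", "timeout", "tls", "ca_file", "tags") are illustrative.

import static io.opentelemetry.api.incubator.config.DeclarativeConfigProperties.empty;

import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
import java.util.Collections;
import java.util.List;

class DeclarativeConfigAccessSketch {
  static void readExporterSettings(DeclarativeConfigProperties config) {
    // Scalars with defaults avoid null checks for optional keys.
    String endpoint = config.getString("endpoint", "http://localhost:4318");
    long timeoutMillis = config.getLong("timeout", 10_000);
    // Walking an absent subtree via empty() never throws and never returns null.
    String caFile = config.getStructured("tls", empty()).getString("ca_file");
    // Scalar lists come back typed; entries of another type are filtered out.
    List<String> tags = config.getScalarList("tags", String.class, Collections.emptyList());
    System.out.println(endpoint + " " + timeoutMillis + " " + caFile + " " + tags);
  }
}
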
b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/LogRecordProcessorComponentProvider.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.logs.LogRecordProcessor; +import io.opentelemetry.sdk.logs.ReadWriteLogRecord; + +public class LogRecordProcessorComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return LogRecordProcessor.class; + } + + @Override + public String getName() { + return "test"; + } + + @Override + public LogRecordProcessor create(DeclarativeConfigProperties config) { + return new TestLogRecordProcessor(config); + } + + public static class TestLogRecordProcessor implements LogRecordProcessor { + + public final DeclarativeConfigProperties config; + + private TestLogRecordProcessor(DeclarativeConfigProperties config) { + this.config = config; + } + + @Override + public void onEmit(Context context, ReadWriteLogRecord logRecord) {} + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/MetricExporterComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/MetricExporterComponentProvider.java new file mode 100644 index 00000000000..80ea6b556ce --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/MetricExporterComponentProvider.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import java.util.Collection; + +public class MetricExporterComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return MetricExporter.class; + } + + @Override + public String getName() { + return "test"; + } + + @Override + public MetricExporter create(DeclarativeConfigProperties config) { + return new TestMetricExporter(config); + } + + public static class TestMetricExporter implements MetricExporter { + + public final DeclarativeConfigProperties config; + + private TestMetricExporter(DeclarativeConfigProperties config) { + this.config = config; + } + + @Override + public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { + return AggregationTemporalitySelector.alwaysCumulative() + .getAggregationTemporality(instrumentType); + } + + @Override + public CompletableResultCode export(Collection 
metrics) { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/ResourceComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/ResourceComponentProvider.java new file mode 100644 index 00000000000..0ec06894896 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/ResourceComponentProvider.java @@ -0,0 +1,27 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.resources.Resource; + +public class ResourceComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return Resource.class; + } + + @Override + public String getName() { + return "unused"; + } + + @Override + public Resource create(DeclarativeConfigProperties config) { + return Resource.builder().put("shape", "square").put("color", "red").build(); + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/ResourceOrderedFirstComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/ResourceOrderedFirstComponentProvider.java new file mode 100644 index 00000000000..181c7b469c8 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/ResourceOrderedFirstComponentProvider.java @@ -0,0 +1,33 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.Ordered; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.resources.Resource; + +public class ResourceOrderedFirstComponentProvider implements ComponentProvider, Ordered { + @Override + public Class getType() { + return Resource.class; + } + + @Override + public String getName() { + return "unused"; + } + + @Override + public Resource create(DeclarativeConfigProperties config) { + return Resource.builder().put("order", "first").build(); + } + + @Override + public int order() { + return 1; + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/ResourceOrderedSecondComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/ResourceOrderedSecondComponentProvider.java new file mode 100644 index 00000000000..5cbb5e299e4 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/ResourceOrderedSecondComponentProvider.java @@ -0,0 +1,34 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.Ordered; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.resources.Resource; + +public class ResourceOrderedSecondComponentProvider + implements ComponentProvider, Ordered { + @Override + public Class getType() { + return Resource.class; + } + + @Override + public String getName() { + return "unused"; + } + + @Override + public Resource create(DeclarativeConfigProperties config) { + return Resource.builder().put("order", "second").build(); + } + + @Override + public int order() { + return 2; + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/SamplerComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/SamplerComponentProvider.java new file mode 100644 index 00000000000..3264dd91fc7 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/SamplerComponentProvider.java @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.trace.data.LinkData; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import io.opentelemetry.sdk.trace.samplers.SamplingResult; +import java.util.List; + +public class SamplerComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return Sampler.class; + } + + @Override + public String getName() { + return "test"; + } + + @Override + public Sampler create(DeclarativeConfigProperties config) { + return new TestSampler(config); + } + + public static class TestSampler implements Sampler { + + public final DeclarativeConfigProperties config; + + private TestSampler(DeclarativeConfigProperties config) { + this.config = config; + } + + @Override + public SamplingResult shouldSample( + Context parentContext, + String traceId, + String name, + SpanKind spanKind, + Attributes attributes, + List parentLinks) { + return SamplingResult.recordOnly(); + } + + @Override + public String getDescription() { + return "test"; + } + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/SpanExporterComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/SpanExporterComponentProvider.java new file mode 100644 index 00000000000..ddaca3ca4b9 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/SpanExporterComponentProvider.java @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import 
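
ResourceOrderedFirstComponentProvider and ResourceOrderedSecondComponentProvider above differ only in their Ordered value, which presumably controls the order in which the contributed resources are merged. A short sketch under the assumption of standard Resource.merge semantics, where attributes from the resource passed to merge() win on conflicting keys:

import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.sdk.resources.Resource;

class OrderedResourceMergeSketch {
  static void example() {
    Resource first = Resource.builder().put("order", "first").build(); // provider order() == 1
    Resource second = Resource.builder().put("order", "second").build(); // provider order() == 2
    // Merging in provider order: the later resource's attributes take precedence.
    Resource merged = first.merge(second);
    // Prints "second" under the assumed merge semantics.
    System.out.println(merged.getAttribute(AttributeKey.stringKey("order")));
  }
}
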
io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import java.util.Collection; + +public class SpanExporterComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return SpanExporter.class; + } + + @Override + public String getName() { + return "test"; + } + + @Override + public SpanExporter create(DeclarativeConfigProperties config) { + return new TestSpanExporter(config); + } + + public static class TestSpanExporter implements SpanExporter { + + public final DeclarativeConfigProperties config; + + private TestSpanExporter(DeclarativeConfigProperties config) { + this.config = config; + } + + @Override + public CompletableResultCode export(Collection spans) { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/SpanProcessorComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/SpanProcessorComponentProvider.java new file mode 100644 index 00000000000..3a1ddf9b13a --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/SpanProcessorComponentProvider.java @@ -0,0 +1,61 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.trace.SpanProcessor; + +public class SpanProcessorComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return SpanProcessor.class; + } + + @Override + public String getName() { + return "test"; + } + + @Override + public SpanProcessor create(DeclarativeConfigProperties config) { + return new TestSpanProcessor(config); + } + + public static class TestSpanProcessor implements SpanProcessor { + + public final DeclarativeConfigProperties config; + + private TestSpanProcessor(DeclarativeConfigProperties config) { + this.config = config; + } + + @Override + public void onStart(Context parentContext, ReadWriteSpan span) {} + + @Override + public boolean isStartRequired() { + return true; + } + + @Override + public void onEnd(ReadableSpan span) {} + + @Override + public boolean isEndRequired() { + return true; + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/TextMapPropagatorComponentProvider.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/TextMapPropagatorComponentProvider.java new file mode 100644 index 00000000000..0ab454da8ad --- /dev/null +++ 
b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/fileconfig/component/TextMapPropagatorComponentProvider.java @@ -0,0 +1,55 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.fileconfig.component; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.TextMapGetter; +import io.opentelemetry.context.propagation.TextMapPropagator; +import io.opentelemetry.context.propagation.TextMapSetter; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import java.util.Collection; +import java.util.Collections; +import javax.annotation.Nullable; + +public class TextMapPropagatorComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return TextMapPropagator.class; + } + + @Override + public String getName() { + return "test"; + } + + @Override + public TextMapPropagator create(DeclarativeConfigProperties config) { + return new TestTextMapPropagator(config); + } + + public static class TestTextMapPropagator implements TextMapPropagator { + + public final DeclarativeConfigProperties config; + + private TestTextMapPropagator(DeclarativeConfigProperties config) { + this.config = config; + } + + @Override + public Collection fields() { + return Collections.emptyList(); + } + + @Override + public void inject(Context context, @Nullable C carrier, TextMapSetter setter) {} + + @Override + public Context extract(Context context, @Nullable C carrier, TextMapGetter getter) { + return context; + } + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/resources/ServiceInstanceIdResourceProviderTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/resources/ServiceInstanceIdResourceProviderTest.java new file mode 100644 index 00000000000..59acb29e2dd --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/resources/ServiceInstanceIdResourceProviderTest.java @@ -0,0 +1,71 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.resources; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.google.common.collect.ImmutableMap; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties; +import io.opentelemetry.sdk.resources.Resource; +import java.util.Collections; +import java.util.Map; +import java.util.stream.Stream; +import org.junit.jupiter.api.DynamicTest; +import org.junit.jupiter.api.TestFactory; + +class ServiceInstanceIdResourceProviderTest { + + private static class TestCase { + private final String name; + final String expectedValue; + final Map attributes; + + TestCase(String name, String expectedValue, Map attributes) { + this.name = name; + this.expectedValue = expectedValue; + this.attributes = attributes; + } + } + + @TestFactory + Stream createResource() { + return Stream.of( + new TestCase( + "user provided service.instance.id", + null, + ImmutableMap.of("service.instance.id", "custom")), + new TestCase("random value", "random", Collections.emptyMap())) + .map( + testCase -> + DynamicTest.dynamicTest( + testCase.name, + () -> { + 
ServiceInstanceIdResourceProvider provider = + new ServiceInstanceIdResourceProvider(); + DefaultConfigProperties config = + DefaultConfigProperties.createFromMap(Collections.emptyMap()); + AttributesBuilder builder = Attributes.builder(); + testCase.attributes.forEach(builder::put); + Resource existing = Resource.create(builder.build()); + Resource resource = + provider.shouldApply(config, existing) + ? provider.createResource(config) + : Resource.empty(); + + String actual = + resource + .getAttributes() + .get(ServiceInstanceIdResourceProvider.SERVICE_INSTANCE_ID); + if ("random".equals(testCase.expectedValue)) { + assertThat(actual).isNotNull(); + } else { + assertThat(actual).isEqualTo(testCase.expectedValue); + } + })); + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/trace/OnEndSpanProcessorTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/trace/OnEndSpanProcessorTest.java new file mode 100644 index 00000000000..426b68e8131 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/trace/OnEndSpanProcessorTest.java @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.trace; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.trace.SpanProcessor; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.Test; + +class OnEndSpanProcessorTest { + + @Test + void endOnly() { + AtomicReference seenSpan = new AtomicReference<>(); + ReadWriteSpan inputSpan = mock(ReadWriteSpan.class); + + SpanProcessor processor = OnEndSpanProcessor.create(seenSpan::set); + + assertThat(processor.isStartRequired()).isFalse(); + assertThat(processor.isEndRequired()).isTrue(); + processor.onEnd(inputSpan); + assertThat(seenSpan.get()).isSameAs(inputSpan); + } +} diff --git a/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/trace/OnStartSpanProcessorTest.java b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/trace/OnStartSpanProcessorTest.java new file mode 100644 index 00000000000..245486ec9e2 --- /dev/null +++ b/sdk-extensions/incubator/src/test/java/io/opentelemetry/sdk/extension/incubator/trace/OnStartSpanProcessorTest.java @@ -0,0 +1,39 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.incubator.trace; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.SpanProcessor; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.Test; + +class OnStartSpanProcessorTest { + + @Test + void startOnly() { + AtomicReference seenContext = new AtomicReference<>(); + AtomicReference seenSpan = new AtomicReference<>(); + Context context = mock(Context.class); + ReadWriteSpan inputSpan = mock(ReadWriteSpan.class); + + SpanProcessor processor = + OnStartSpanProcessor.create( + (ctx, span) -> { + seenContext.set(ctx); + seenSpan.set(span); + }); + + assertThat(processor.isStartRequired()).isTrue(); + 
assertThat(processor.isEndRequired()).isFalse(); + processor.onStart(context, inputSpan); + assertThat(seenContext.get()).isSameAs(context); + assertThat(seenSpan.get()).isSameAs(inputSpan); + } +} diff --git a/sdk-extensions/incubator/src/test/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/sdk-extensions/incubator/src/test/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider new file mode 100644 index 00000000000..bf04a784ecd --- /dev/null +++ b/sdk-extensions/incubator/src/test/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider @@ -0,0 +1,10 @@ +io.opentelemetry.sdk.extension.incubator.fileconfig.component.MetricExporterComponentProvider +io.opentelemetry.sdk.extension.incubator.fileconfig.component.SpanExporterComponentProvider +io.opentelemetry.sdk.extension.incubator.fileconfig.component.LogRecordExporterComponentProvider +io.opentelemetry.sdk.extension.incubator.fileconfig.component.TextMapPropagatorComponentProvider +io.opentelemetry.sdk.extension.incubator.fileconfig.component.SamplerComponentProvider +io.opentelemetry.sdk.extension.incubator.fileconfig.component.SpanProcessorComponentProvider +io.opentelemetry.sdk.extension.incubator.fileconfig.component.LogRecordProcessorComponentProvider +io.opentelemetry.sdk.extension.incubator.fileconfig.component.ResourceComponentProvider +io.opentelemetry.sdk.extension.incubator.fileconfig.component.ResourceOrderedFirstComponentProvider +io.opentelemetry.sdk.extension.incubator.fileconfig.component.ResourceOrderedSecondComponentProvider diff --git a/sdk-extensions/incubator/src/test/resources/META-INF/services/io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfigurationCustomizerProvider b/sdk-extensions/incubator/src/test/resources/META-INF/services/io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfigurationCustomizerProvider new file mode 100644 index 00000000000..f60ca4e082e --- /dev/null +++ b/sdk-extensions/incubator/src/test/resources/META-INF/services/io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfigurationCustomizerProvider @@ -0,0 +1 @@ +io.opentelemetry.sdk.extension.incubator.fileconfig.TestDeclarativeConfigurationCustomizerProvider diff --git a/sdk-extensions/jaeger-remote-sampler/build.gradle.kts b/sdk-extensions/jaeger-remote-sampler/build.gradle.kts index fca0210b6a1..32b51d6065d 100644 --- a/sdk-extensions/jaeger-remote-sampler/build.gradle.kts +++ b/sdk-extensions/jaeger-remote-sampler/build.gradle.kts @@ -12,7 +12,9 @@ otelJava.moduleName.set("io.opentelemetry.sdk.extension.trace.jaeger") dependencies { api(project(":sdk:all")) + compileOnly(project(":api:incubator")) compileOnly(project(":sdk-extensions:autoconfigure")) + compileOnly(project(":sdk-extensions:incubator")) implementation(project(":sdk:all")) implementation(project(":exporters:common")) diff --git a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerBuilder.java b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerBuilder.java index 4cb4f6f4970..24dfac82acc 100644 --- a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerBuilder.java +++ 
b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerBuilder.java @@ -21,6 +21,7 @@ import javax.annotation.Nullable; import javax.net.ssl.SSLContext; import javax.net.ssl.X509TrustManager; +import okhttp3.ConnectionSpec; import okhttp3.Headers; import okhttp3.OkHttpClient; import okhttp3.Protocol; @@ -165,14 +166,17 @@ public JaegerRemoteSampler build() { clientBuilder.callTimeout(Duration.ofNanos(TimeUnit.SECONDS.toNanos(DEFAULT_TIMEOUT_SECS))); - SSLContext sslContext = tlsConfigHelper.getSslContext(); - X509TrustManager trustManager = tlsConfigHelper.getTrustManager(); + String endpoint = this.endpoint.resolve(GRPC_ENDPOINT_PATH).toString(); + boolean isPlainHttp = endpoint.startsWith("http://"); + + SSLContext sslContext = isPlainHttp ? null : tlsConfigHelper.getSslContext(); + X509TrustManager trustManager = isPlainHttp ? null : tlsConfigHelper.getTrustManager(); if (sslContext != null && trustManager != null) { clientBuilder.sslSocketFactory(sslContext.getSocketFactory(), trustManager); } - String endpoint = this.endpoint.resolve(GRPC_ENDPOINT_PATH).toString(); - if (endpoint.startsWith("http://")) { + if (isPlainHttp) { + clientBuilder.connectionSpecs(Collections.singletonList(ConnectionSpec.CLEARTEXT)); clientBuilder.protocols(Collections.singletonList(Protocol.H2_PRIOR_KNOWLEDGE)); } else { clientBuilder.protocols(Arrays.asList(Protocol.HTTP_2, Protocol.HTTP_1_1)); diff --git a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerProvider.java b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerProvider.java index d099f3b33f8..422a3b3f4e4 100644 --- a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerProvider.java +++ b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerProvider.java @@ -10,16 +10,21 @@ import io.opentelemetry.sdk.trace.samplers.Sampler; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; public class JaegerRemoteSamplerProvider implements ConfigurableSamplerProvider { + private static final Logger LOGGER = + Logger.getLogger(JaegerRemoteSamplerProvider.class.getName()); + // visible for testing static final String ATTRIBUTE_PROPERTY = "otel.resource.attributes"; static final String SERVICE_NAME_PROPERTY = "otel.service.name"; static final String SAMPLER_ARG_PROPERTY = "otel.traces.sampler.arg"; static final String RESOURCE_ATTRIBUTE_SERVICE_NAME_PROPERTY = "service.name"; private static final String ENDPOINT_KEY = "endpoint"; - private static final String POLLING_INTERVAL = "pollingInterval"; + private static final String POLLING_INTERVAL = "pollingIntervalMs"; private static final String INITIAL_SAMPLING_RATE = "initialSamplingRate"; @Override @@ -43,9 +48,23 @@ public Sampler createSampler(ConfigProperties config) { builder.setEndpoint(endpoint); } String pollingInterval = params.get(POLLING_INTERVAL); + // Previously, we mistakenly read from pollingInterval. For backwards compatibility, check + // pollingInterval and log warning if set. 
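The backwards-compatibility note above is easiest to see end to end from the autoconfigure side. A hedged sketch, assuming the sampler is selected by its registered `jaeger_remote` name and an illustrative local endpoint; only the property keys come from the provider change above:

```java
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;

class JaegerRemoteSamplerAutoConfigSketch {
  public static void main(String[] args) {
    // "pollingIntervalMs" is the corrected key; the old "pollingInterval" key still
    // works but now triggers the deprecation warning added above.
    System.setProperty("otel.traces.sampler", "jaeger_remote");
    System.setProperty(
        "otel.traces.sampler.arg",
        "endpoint=http://localhost:14250,pollingIntervalMs=30000,initialSamplingRate=0.25");

    OpenTelemetrySdk sdk = AutoConfiguredOpenTelemetrySdk.initialize().getOpenTelemetrySdk();
    sdk.close();
  }
}
```

With the deprecated `pollingInterval` key the sampler still behaves the same, but the warning above is logged at sampler creation.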
+ if (pollingInterval == null) { + pollingInterval = params.get("pollingInterval"); + if (pollingInterval != null) { + LOGGER.log( + Level.WARNING, + SAMPLER_ARG_PROPERTY + + " contains deprecated \"pollingInterval\" property. Please use \"" + + POLLING_INTERVAL + + "\" instead."); + } + } if (pollingInterval != null) { builder.setPollingInterval(Integer.valueOf(pollingInterval), TimeUnit.MILLISECONDS); } + String initialSamplingRate = params.get(INITIAL_SAMPLING_RATE); if (initialSamplingRate != null) { builder.setInitialSampler( diff --git a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/OkHttpGrpcService.java b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/OkHttpGrpcService.java index 02cc43869cb..ca1455ad3f3 100644 --- a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/OkHttpGrpcService.java +++ b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/OkHttpGrpcService.java @@ -53,7 +53,7 @@ public SamplingStrategyResponseUnMarshaler execute( SamplingStrategyResponseUnMarshaler responseUnmarshaller) { Request.Builder requestBuilder = new Request.Builder().url(url).headers(headers); - RequestBody requestBody = new GrpcRequestBody(exportRequest, false); + RequestBody requestBody = new GrpcRequestBody(exportRequest, null); requestBuilder.post(requestBody); try { diff --git a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/RateLimitingSampler.java b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/RateLimitingSampler.java index 6a6caa171e6..462f87672c4 100644 --- a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/RateLimitingSampler.java +++ b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/RateLimitingSampler.java @@ -21,6 +21,7 @@ import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.util.List; +import java.util.Locale; /** * {@link RateLimitingSampler} sampler uses a leaky bucket rate limiter to ensure that traces are @@ -73,7 +74,7 @@ public String toString() { } private static String decimalFormat(double value) { - DecimalFormatSymbols decimalFormatSymbols = DecimalFormatSymbols.getInstance(); + DecimalFormatSymbols decimalFormatSymbols = DecimalFormatSymbols.getInstance(Locale.ROOT); decimalFormatSymbols.setDecimalSeparator('.'); DecimalFormat decimalFormat = new DecimalFormat("0.00", decimalFormatSymbols); diff --git a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/UpstreamGrpcService.java b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/UpstreamGrpcService.java index 32b19e4c0bc..2976ff1302c 100644 --- a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/UpstreamGrpcService.java +++ b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/UpstreamGrpcService.java @@ -11,7 +11,8 @@ import io.opentelemetry.exporter.internal.grpc.ManagedChannelUtil; import io.opentelemetry.exporter.internal.grpc.MarshalerServiceStub; import io.opentelemetry.sdk.common.CompletableResultCode; -import java.util.concurrent.TimeUnit; +import 
java.time.Duration; +import java.util.Objects; import java.util.logging.Level; import java.util.logging.Logger; @@ -48,11 +49,11 @@ public SamplingStrategyResponseUnMarshaler execute( SamplingStrategyParametersMarshaler, SamplingStrategyResponseUnMarshaler, ?> stub = this.stub; if (timeoutNanos > 0) { - stub = stub.withDeadlineAfter(timeoutNanos, TimeUnit.NANOSECONDS); + stub = stub.withDeadlineAfter(Duration.ofNanos(timeoutNanos)); } try { - return Futures.getUnchecked(stub.export(exportRequest)); + return Objects.requireNonNull(Futures.getUnchecked(stub.export(exportRequest))); } catch (Throwable t) { Status status = Status.fromThrowable(t); diff --git a/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/internal/JaegerRemoteSamplerComponentProvider.java b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/internal/JaegerRemoteSamplerComponentProvider.java new file mode 100644 index 00000000000..fd89f42cae9 --- /dev/null +++ b/sdk-extensions/jaeger-remote-sampler/src/main/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/internal/JaegerRemoteSamplerComponentProvider.java @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.extension.trace.jaeger.sampler.internal; + +import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider; +import io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfiguration; +import io.opentelemetry.sdk.extension.trace.jaeger.sampler.JaegerRemoteSampler; +import io.opentelemetry.sdk.extension.trace.jaeger.sampler.JaegerRemoteSamplerBuilder; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.time.Duration; + +/** + * File configuration SPI implementation for {@link JaegerRemoteSampler}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public class JaegerRemoteSamplerComponentProvider implements ComponentProvider { + @Override + public Class getType() { + return Sampler.class; + } + + @Override + public String getName() { + return "jaeger_remote"; + } + + @Override + public Sampler create(DeclarativeConfigProperties config) { + JaegerRemoteSamplerBuilder builder = JaegerRemoteSampler.builder(); + + // Optional configuration + String endpoint = config.getString("endpoint"); + if (endpoint != null) { + builder.setEndpoint(endpoint); + } + Long pollingIntervalMs = config.getLong("internal"); + if (pollingIntervalMs != null) { + builder.setPollingInterval(Duration.ofMillis(pollingIntervalMs)); + } + DeclarativeConfigProperties initialSamplerModel = config.getStructured("initial_sampler"); + if (initialSamplerModel != null) { + Sampler initialSampler = DeclarativeConfiguration.createSampler(initialSamplerModel); + builder.setInitialSampler(initialSampler); + } + + return builder.build(); + } +} diff --git a/sdk-extensions/jaeger-remote-sampler/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/sdk-extensions/jaeger-remote-sampler/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider new file mode 100644 index 00000000000..a06efe29e4f --- /dev/null +++ b/sdk-extensions/jaeger-remote-sampler/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider @@ -0,0 +1 @@ +io.opentelemetry.sdk.extension.trace.jaeger.sampler.internal.JaegerRemoteSamplerComponentProvider diff --git a/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerIntegrationTest.java b/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerIntegrationTest.java index fd36ee29ca4..c873808c332 100644 --- a/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerIntegrationTest.java +++ b/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerIntegrationTest.java @@ -35,7 +35,7 @@ class JaegerRemoteSamplerIntegrationTest { private static final String SERVICE_NAME_DEFAULT_STRATEGY = "foobar"; @Container - public static GenericContainer jaegerContainer = + public static final GenericContainer jaegerContainer = new GenericContainer<>("ghcr.io/open-telemetry/opentelemetry-java/jaeger:1.32") .withImagePullPolicy(PullPolicy.alwaysPull()) .withCommand("--sampling.strategies-file=/sampling.json") diff --git a/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerProviderTest.java b/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerProviderTest.java index 8fa43a7f759..e691b576f18 100644 --- a/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerProviderTest.java +++ b/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerProviderTest.java @@ -34,7 +34,7 @@ void serviceProvider() { .thenReturn("test_service"); HashMap samplerArgs = new HashMap<>(); samplerArgs.put("endpoint", 
"http://localhost:9999"); - samplerArgs.put("pollingInterval", "99"); + samplerArgs.put("pollingIntervalMs", "99"); double samplingRate = 0.33; samplerArgs.put("initialSamplingRate", String.valueOf(samplingRate)); when(mockConfig.getMap(JaegerRemoteSamplerProvider.SAMPLER_ARG_PROPERTY)) diff --git a/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerTest.java b/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerTest.java index d1b16920e2e..7f81789c129 100644 --- a/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerTest.java +++ b/sdk-extensions/jaeger-remote-sampler/src/test/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerTest.java @@ -12,6 +12,7 @@ import static org.junit.jupiter.api.Named.named; import static org.junit.jupiter.params.provider.Arguments.arguments; +import com.linecorp.armeria.common.TlsKeyPair; import com.linecorp.armeria.common.grpc.protocol.ArmeriaStatusException; import com.linecorp.armeria.server.ServerBuilder; import com.linecorp.armeria.server.ServiceRequestContext; @@ -117,7 +118,7 @@ protected CompletionStage handleMessage( }); sb.http(0); sb.https(0); - sb.tls(certificate.certificateFile(), certificate.privateKeyFile()); + sb.tls(TlsKeyPair.of(certificate.privateKey(), certificate.certificate())); sb.tlsCustomizer( ssl -> { ssl.clientAuth(ClientAuth.OPTIONAL); diff --git a/sdk-extensions/jaeger-remote-sampler/src/testGrpcNetty/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerGrpcNettyTest.java b/sdk-extensions/jaeger-remote-sampler/src/testGrpcNetty/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerGrpcNettyTest.java index 5eb5db44bec..257ddf3b139 100644 --- a/sdk-extensions/jaeger-remote-sampler/src/testGrpcNetty/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerGrpcNettyTest.java +++ b/sdk-extensions/jaeger-remote-sampler/src/testGrpcNetty/java/io/opentelemetry/sdk/extension/trace/jaeger/sampler/JaegerRemoteSamplerGrpcNettyTest.java @@ -148,7 +148,7 @@ void description() { void initialSampler() { try (JaegerRemoteSampler sampler = JaegerRemoteSampler.builder() - .setChannel(managedChannel()) + .setChannel(ManagedChannelBuilder.forTarget("example.com").build()) .setServiceName(SERVICE_NAME) .setInitialSampler(Sampler.alwaysOn()) .build()) { diff --git a/sdk/all/src/test/java/io/opentelemetry/sdk/OpenTelemetrySdkTest.java b/sdk/all/src/test/java/io/opentelemetry/sdk/OpenTelemetrySdkTest.java index 09e1a402671..b6ee0aff2d4 100644 --- a/sdk/all/src/test/java/io/opentelemetry/sdk/OpenTelemetrySdkTest.java +++ b/sdk/all/src/test/java/io/opentelemetry/sdk/OpenTelemetrySdkTest.java @@ -415,12 +415,13 @@ void stringRepresentation() { + "resource=Resource{schemaUrl=null, attributes={service.name=\"otel-test\"}}, " + "spanLimitsSupplier=SpanLimitsValue{maxNumberOfAttributes=128, maxNumberOfEvents=128, maxNumberOfLinks=128, maxNumberOfAttributesPerEvent=128, maxNumberOfAttributesPerLink=128, maxAttributeValueLength=2147483647}, " + "sampler=ParentBased{root:AlwaysOnSampler,remoteParentSampled:AlwaysOnSampler,remoteParentNotSampled:AlwaysOffSampler,localParentSampled:AlwaysOnSampler,localParentNotSampled:AlwaysOffSampler}, " - + "spanProcessor=SimpleSpanProcessor{spanExporter=MultiSpanExporter{spanExporters=[MockSpanExporter{}, MockSpanExporter{}]}}" + + 
"spanProcessor=SimpleSpanProcessor{spanExporter=MultiSpanExporter{spanExporters=[MockSpanExporter{}, MockSpanExporter{}]}, exportUnsampledSpans=false}" + "}, " + "meterProvider=SdkMeterProvider{" + "clock=SystemClock{}, " + "resource=Resource{schemaUrl=null, attributes={service.name=\"otel-test\"}}, " + "metricReaders=[PeriodicMetricReader{exporter=MockMetricExporter{}, intervalNanos=60000000000}], " + + "metricProducers=[], " + "views=[RegisteredView{instrumentSelector=InstrumentSelector{instrumentName=instrument}, view=View{name=new-instrument, aggregation=DefaultAggregation, attributesProcessor=NoopAttributesProcessor{}, cardinalityLimit=2000}}]" + "}, " + "loggerProvider=SdkLoggerProvider{" diff --git a/sdk/all/src/test/java/io/opentelemetry/sdk/ScopeConfiguratorTest.java b/sdk/all/src/test/java/io/opentelemetry/sdk/ScopeConfiguratorTest.java new file mode 100644 index 00000000000..0055df8ce7f --- /dev/null +++ b/sdk/all/src/test/java/io/opentelemetry/sdk/ScopeConfiguratorTest.java @@ -0,0 +1,237 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk; + +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameEquals; +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; +import io.opentelemetry.sdk.logs.internal.LoggerConfig; +import io.opentelemetry.sdk.logs.internal.SdkLoggerProviderUtil; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.internal.MeterConfig; +import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.internal.SdkTracerProviderUtil; +import io.opentelemetry.sdk.trace.internal.TracerConfig; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; + +class ScopeConfiguratorTest { + + private final InMemoryLogRecordExporter logRecordExporter = InMemoryLogRecordExporter.create(); + private final InMemoryMetricReader metricReader = InMemoryMetricReader.create(); + private final InMemorySpanExporter spanExporter = InMemorySpanExporter.create(); + + private static final InstrumentationScopeInfo scopeA = InstrumentationScopeInfo.create("scopeA"); + private static final InstrumentationScopeInfo scopeB = InstrumentationScopeInfo.create("scopeB"); + private static final InstrumentationScopeInfo scopeC = InstrumentationScopeInfo.create("scopeC"); + + /** 
Disable "scopeB". All other scopes are enabled by default. */ + @Test + void disableScopeB() { + // Configuration ergonomics will improve after APIs stabilize + SdkTracerProviderBuilder tracerProviderBuilder = SdkTracerProvider.builder(); + SdkTracerProviderUtil.addTracerConfiguratorCondition( + tracerProviderBuilder, nameEquals(scopeB.getName()), TracerConfig.disabled()); + SdkMeterProviderBuilder meterProviderBuilder = SdkMeterProvider.builder(); + SdkMeterProviderUtil.addMeterConfiguratorCondition( + meterProviderBuilder, nameEquals(scopeB.getName()), MeterConfig.disabled()); + SdkLoggerProviderBuilder loggerProviderBuilder = SdkLoggerProvider.builder(); + SdkLoggerProviderUtil.addLoggerConfiguratorCondition( + loggerProviderBuilder, nameEquals(scopeB.getName()), LoggerConfig.disabled()); + + OpenTelemetrySdk sdk = + OpenTelemetrySdk.builder() + .setTracerProvider( + tracerProviderBuilder + .addSpanProcessor(SimpleSpanProcessor.create(spanExporter)) + .build()) + .setMeterProvider(meterProviderBuilder.registerMetricReader(metricReader).build()) + .setLoggerProvider( + loggerProviderBuilder + .addLogRecordProcessor(SimpleLogRecordProcessor.create(logRecordExporter)) + .build()) + .build(); + + simulateInstrumentation(sdk); + + // Collect all the telemetry. Ensure we don't see any from scopeB, and that the telemetry from + // scopeA and scopeC is valid. + assertThat(spanExporter.getFinishedSpanItems()) + .satisfies( + spans -> { + Map> spansByScope = + spans.stream() + .collect(Collectors.groupingBy(SpanData::getInstrumentationScopeInfo)); + assertThat(spansByScope.get(scopeA)).hasSize(1); + assertThat(spansByScope.get(scopeB)).isNull(); + assertThat(spansByScope.get(scopeC)).hasSize(1); + }); + assertThat(metricReader.collectAllMetrics()) + .satisfies( + metrics -> { + Map> metricsByScope = + metrics.stream() + .collect(Collectors.groupingBy(MetricData::getInstrumentationScopeInfo)); + assertThat(metricsByScope.get(scopeA)).hasSize(1); + assertThat(metricsByScope.get(scopeB)).isNull(); + assertThat(metricsByScope.get(scopeC)).hasSize(1); + }); + assertThat(logRecordExporter.getFinishedLogRecordItems()) + .satisfies( + logs -> { + Map> logsByScope = + logs.stream() + .collect(Collectors.groupingBy(LogRecordData::getInstrumentationScopeInfo)); + assertThat(logsByScope.get(scopeA)).hasSize(1); + assertThat(logsByScope.get(scopeB)).isNull(); + assertThat(logsByScope.get(scopeC)).hasSize(1); + }); + } + + /** Disable all scopes by default and enable a single scope. 
*/ + @Test + void disableAllScopesExceptB() { + // Configuration ergonomics will improve after APIs stabilize + SdkTracerProviderBuilder tracerProviderBuilder = SdkTracerProvider.builder(); + SdkTracerProviderUtil.setTracerConfigurator( + tracerProviderBuilder, + TracerConfig.configuratorBuilder() + .setDefault(TracerConfig.disabled()) + .addCondition(nameEquals(scopeB.getName()), TracerConfig.enabled()) + .build()); + SdkMeterProviderBuilder meterProviderBuilder = SdkMeterProvider.builder(); + SdkMeterProviderUtil.setMeterConfigurator( + meterProviderBuilder, + MeterConfig.configuratorBuilder() + .setDefault(MeterConfig.disabled()) + .addCondition(nameEquals(scopeB.getName()), MeterConfig.enabled()) + .build()); + SdkLoggerProviderBuilder loggerProviderBuilder = SdkLoggerProvider.builder(); + SdkLoggerProviderUtil.setLoggerConfigurator( + loggerProviderBuilder, + LoggerConfig.configuratorBuilder() + .setDefault(LoggerConfig.disabled()) + .addCondition(nameEquals(scopeB.getName()), LoggerConfig.enabled()) + .build()); + + OpenTelemetrySdk sdk = + OpenTelemetrySdk.builder() + .setTracerProvider( + tracerProviderBuilder + .addSpanProcessor(SimpleSpanProcessor.create(spanExporter)) + .build()) + .setMeterProvider(meterProviderBuilder.registerMetricReader(metricReader).build()) + .setLoggerProvider( + loggerProviderBuilder + .addLogRecordProcessor(SimpleLogRecordProcessor.create(logRecordExporter)) + .build()) + .build(); + + simulateInstrumentation(sdk); + + // Collect all the telemetry. Ensure we only see telemetry from scopeB, since other scopes have + // been disabled by default. + assertThat(spanExporter.getFinishedSpanItems()) + .satisfies( + spans -> { + Map> spansByScope = + spans.stream() + .collect(Collectors.groupingBy(SpanData::getInstrumentationScopeInfo)); + assertThat(spansByScope.get(scopeA)).isNull(); + assertThat(spansByScope.get(scopeB)).hasSize(1); + assertThat(spansByScope.get(scopeC)).isNull(); + }); + assertThat(metricReader.collectAllMetrics()) + .satisfies( + metrics -> { + Map> metricsByScope = + metrics.stream() + .collect(Collectors.groupingBy(MetricData::getInstrumentationScopeInfo)); + assertThat(metricsByScope.get(scopeA)).isNull(); + assertThat(metricsByScope.get(scopeB)).hasSize(1); + assertThat(metricsByScope.get(scopeC)).isNull(); + }); + assertThat(logRecordExporter.getFinishedLogRecordItems()) + .satisfies( + logs -> { + Map> logsByScope = + logs.stream() + .collect(Collectors.groupingBy(LogRecordData::getInstrumentationScopeInfo)); + assertThat(logsByScope.get(scopeA)).isNull(); + assertThat(logsByScope.get(scopeB)).hasSize(1); + assertThat(logsByScope.get(scopeC)).isNull(); + }); + } + + /** + * Emit spans, metrics and logs in a hierarchy of 3 scopes: scopeA -> scopeB -> scopeC. Exercise + * the scope config which is common across all signals. 
+ */ + private static void simulateInstrumentation(OpenTelemetry openTelemetry) { + // Start scopeA + Tracer scopeATracer = openTelemetry.getTracer(scopeA.getName()); + Meter scopeAMeter = openTelemetry.getMeter(scopeA.getName()); + Logger scopeALogger = openTelemetry.getLogsBridge().get(scopeA.getName()); + Span spanA = scopeATracer.spanBuilder("spanA").startSpan(); + try (Scope spanAScope = spanA.makeCurrent()) { + scopeALogger.logRecordBuilder().setBody("scopeA log message").emit(); + + // Start scopeB + Tracer scopeBTracer = openTelemetry.getTracer(scopeB.getName()); + Meter scopeBMeter = openTelemetry.getMeter(scopeB.getName()); + Logger scopeBLogger = openTelemetry.getLogsBridge().get(scopeB.getName()); + Span spanB = scopeBTracer.spanBuilder("spanB").startSpan(); + try (Scope spanBScope = spanB.makeCurrent()) { + scopeBLogger.logRecordBuilder().setBody("scopeB log message").emit(); + + // Start scopeC + Tracer scopeCTracer = openTelemetry.getTracer(scopeC.getName()); + Meter scopeCMeter = openTelemetry.getMeter(scopeC.getName()); + Logger scopeCLogger = openTelemetry.getLogsBridge().get(scopeC.getName()); + Span spanC = scopeCTracer.spanBuilder("spanC").startSpan(); + try (Scope spanCScope = spanB.makeCurrent()) { + scopeCLogger.logRecordBuilder().setBody("scopeC log message").emit(); + } finally { + spanC.end(); + scopeCMeter.counterBuilder("scopeCCounter").build().add(1); + } + // End scopeC + + } finally { + spanB.end(); + scopeBMeter.counterBuilder("scopeBCounter").build().add(1); + } + // End scopeB + + } finally { + spanA.end(); + scopeAMeter.counterBuilder("scopeACounter").build().add(1); + } + // End scopeA + } +} diff --git a/sdk/all/src/test/java/io/opentelemetry/sdk/common/SystemClockTest.java b/sdk/all/src/test/java/io/opentelemetry/sdk/common/SystemClockTest.java index 5472a6a99e1..6078d3234f4 100644 --- a/sdk/all/src/test/java/io/opentelemetry/sdk/common/SystemClockTest.java +++ b/sdk/all/src/test/java/io/opentelemetry/sdk/common/SystemClockTest.java @@ -18,7 +18,7 @@ class SystemClockTest { @EnabledOnJre(JRE.JAVA_8) @Test - void millisPrecision() { + void now_millisPrecision() { // If we test many times, we can be fairly sure we didn't just get lucky with having a rounded // result on a higher than expected precision timestamp. for (int i = 0; i < 100; i++) { @@ -29,7 +29,7 @@ void millisPrecision() { @DisabledOnJre(JRE.JAVA_8) @Test - void microsPrecision() { + void now_microsPrecision() { // If we test many times, we can be fairly sure we get at least one timestamp that isn't // coincidentally rounded to millis precision. int numHasMicros = 0; @@ -41,4 +41,29 @@ void microsPrecision() { } assertThat(numHasMicros).isNotZero(); } + + @Test + void now_lowPrecision() { + // If we test many times, we can be fairly sure we didn't just get lucky with having a rounded + // result on a higher than expected precision timestamp. + for (int i = 0; i < 100; i++) { + long now = SystemClock.getInstance().now(false); + assertThat(now % 1000000).isZero(); + } + } + + @DisabledOnJre(JRE.JAVA_8) + @Test + void now_highPrecision() { + // If we test many times, we can be fairly sure we get at least one timestamp that isn't + // coincidentally rounded to millis precision. 
+ int numHasMicros = 0; + for (int i = 0; i < 100; i++) { + long now = SystemClock.getInstance().now(true); + if (now % 1000000 != 0) { + numHasMicros++; + } + } + assertThat(numHasMicros).isNotZero(); + } } diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/common/Clock.java b/sdk/common/src/main/java/io/opentelemetry/sdk/common/Clock.java index 9c61dce3271..dab812ebe3b 100644 --- a/sdk/common/src/main/java/io/opentelemetry/sdk/common/Clock.java +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/common/Clock.java @@ -34,9 +34,29 @@ static Clock getDefault() { * // Spend time... * long durationNanos = clock.now() - startNanos; * } + * + *
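Before the Clock Javadoc continues below, a small usage sketch of the precision toggle being introduced; nothing beyond `Clock.getDefault()` and the new `now(boolean)` overload is assumed:

```java
import io.opentelemetry.sdk.common.Clock;

class ClockPrecisionSketch {
  public static void main(String[] args) {
    Clock clock = Clock.getDefault();

    // Equivalent to clock.now(true): best available wall-clock precision.
    long preciseEpochNanos = clock.now();

    // Millisecond precision converted to nanos, skipping the extra cost of a
    // higher-precision reading when callers do not need it.
    long coarseEpochNanos = clock.now(false);

    System.out.println(preciseEpochNanos - coarseEpochNanos);
  }
}
```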

<p>Calling this is equivalent to calling {@link #now(boolean)} with {@code highPrecision=true}. */ long now(); + + /** + * Returns the current epoch timestamp in nanos from this clock. + * + *
<p>This overload of {@link #now()} includes a {@code highPrecision} argument which specifies + * whether the implementation should attempt to resolve higher precision at the potential expense + * of performance. For example, in java 9+ it's sometimes possible to resolve ns precision higher + * than the ms precision of {@link System#currentTimeMillis()}, but doing so incurs a performance + * penalty which some callers may wish to avoid. In contrast, we don't currently know if resolving + * ns precision is possible in java 8, regardless of the value of {@code highPrecision}. + * + *
<p>
See {@link #now()} javadoc for details on usage. + * + * @since 1.38.0 + */ + default long now(boolean highPrecision) { + return now(); + } + /** * Returns a time measurement with nanosecond precision that can only be used to calculate elapsed * time. diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/common/CompletableResultCode.java b/sdk/common/src/main/java/io/opentelemetry/sdk/common/CompletableResultCode.java index b7f7fa944fd..600ec73a48e 100644 --- a/sdk/common/src/main/java/io/opentelemetry/sdk/common/CompletableResultCode.java +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/common/CompletableResultCode.java @@ -13,6 +13,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; /** @@ -33,9 +34,21 @@ public static CompletableResultCode ofFailure() { return FAILURE; } + /** + * Returns a {@link CompletableResultCode} that has been {@link #failExceptionally(Throwable) + * failed exceptionally}. + * + * @since 1.41.0 + */ + public static CompletableResultCode ofExceptionalFailure(Throwable throwable) { + return new CompletableResultCode().failExceptionally(throwable); + } + /** * Returns a {@link CompletableResultCode} that completes after all the provided {@link - * CompletableResultCode}s complete. If any of the results fail, the result will be failed. + * CompletableResultCode}s complete. If any of the results fail, the result will be failed. If any + * {@link #failExceptionally(Throwable) failed exceptionally}, the result will be failed + * exceptionally with the first {@link Throwable} from {@code codes}. */ public static CompletableResultCode ofAll(Collection codes) { if (codes.isEmpty()) { @@ -44,15 +57,20 @@ public static CompletableResultCode ofAll(Collection code CompletableResultCode result = new CompletableResultCode(); AtomicInteger pending = new AtomicInteger(codes.size()); AtomicBoolean failed = new AtomicBoolean(); + AtomicReference throwableRef = new AtomicReference<>(); for (CompletableResultCode code : codes) { code.whenComplete( () -> { if (!code.isSuccess()) { failed.set(true); + Throwable codeThrowable = code.getFailureThrowable(); + if (codeThrowable != null) { + throwableRef.compareAndSet(null, codeThrowable); + } } if (pending.decrementAndGet() == 0) { if (failed.get()) { - result.fail(); + result.failInternal(throwableRef.get()); } else { result.succeed(); } @@ -71,6 +89,10 @@ public CompletableResultCode() {} @GuardedBy("lock") private Boolean succeeded = null; + @Nullable + @GuardedBy("lock") + private Throwable throwable = null; + @GuardedBy("lock") private final List completionActions = new ArrayList<>(); @@ -89,11 +111,30 @@ public CompletableResultCode succeed() { return this; } - /** Complete this {@link CompletableResultCode} unsuccessfully if it is not already completed. */ + /** + * Complete this {@link CompletableResultCode} unsuccessfully if it is not already completed, + * setting the {@link #getFailureThrowable() failure throwable} to {@code null}. + */ public CompletableResultCode fail() { + return failInternal(null); + } + + /** + * Completes this {@link CompletableResultCode} unsuccessfully if it is not already completed, + * setting the {@link #getFailureThrowable() failure throwable} to {@code throwable}. 
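Stepping out of the Javadoc for a moment, a hedged sketch of how an exporter author might surface the new exceptional-failure state; the export method and error are illustrative, only the `CompletableResultCode` API comes from this change:

```java
import io.opentelemetry.sdk.common.CompletableResultCode;
import java.io.IOException;

class ExceptionalFailureSketch {
  // Hypothetical export step that reports its failure cause to callers.
  static CompletableResultCode exportBatch(boolean simulateIoError) {
    if (simulateIoError) {
      return CompletableResultCode.ofExceptionalFailure(new IOException("connection reset"));
    }
    return CompletableResultCode.ofSuccess();
  }

  public static void main(String[] args) {
    CompletableResultCode result = exportBatch(true);
    result.whenComplete(
        () -> {
          if (!result.isSuccess()) {
            // New in this change: the cause is retained and can be inspected.
            Throwable cause = result.getFailureThrowable();
            System.err.println(
                "export failed: " + (cause != null ? cause.getMessage() : "unknown"));
          }
        });
  }
}
```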
+ * + * @param throwable the {@code Throwable} that caused the failure, or {@code null} + * @since 1.41.0 + */ + public CompletableResultCode failExceptionally(@Nullable Throwable throwable) { + return failInternal(throwable); + } + + private CompletableResultCode failInternal(@Nullable Throwable throwable) { synchronized (lock) { if (succeeded == null) { succeeded = false; + this.throwable = throwable; for (Runnable action : completionActions) { action.run(); } @@ -104,7 +145,7 @@ public CompletableResultCode fail() { /** * Obtain the current state of completion. Generally call once completion is achieved via the - * thenRun method. + * {@link #whenComplete(Runnable)} method. * * @return the current state of completion */ @@ -114,6 +155,23 @@ public boolean isSuccess() { } } + /** + * Returns {@link Throwable} if this {@link CompletableResultCode} was {@link + * #failExceptionally(Throwable) failed exceptionally}. Generally call once completion is achieved + * via the {@link #whenComplete(Runnable)} method. + * + * @return the throwable if failed exceptionally, or null if: {@link #fail() failed without + * exception}, {@link #succeed() succeeded}, {@link #failExceptionally(Throwable)} with a null + * {@code throwable}, or not complete. + * @since 1.41.0 + */ + @Nullable + public Throwable getFailureThrowable() { + synchronized (lock) { + return throwable; + } + } + /** * Perform an action on completion. Actions are guaranteed to be called only once. * diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/common/SystemClock.java b/sdk/common/src/main/java/io/opentelemetry/sdk/common/SystemClock.java index 23265b3b5b4..3131f6834e0 100644 --- a/sdk/common/src/main/java/io/opentelemetry/sdk/common/SystemClock.java +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/common/SystemClock.java @@ -6,6 +6,7 @@ package io.opentelemetry.sdk.common; import io.opentelemetry.sdk.internal.JavaVersionSpecific; +import java.util.concurrent.TimeUnit; import javax.annotation.concurrent.ThreadSafe; /** @@ -26,7 +27,15 @@ static Clock getInstance() { @Override public long now() { - return JavaVersionSpecific.get().currentTimeNanos(); + return now(true); + } + + @Override + public long now(boolean highPrecision) { + if (highPrecision) { + return JavaVersionSpecific.get().currentTimeNanos(); + } + return TimeUnit.MILLISECONDS.toNanos(System.currentTimeMillis()); } @Override diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/MemoryMode.java b/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/MemoryMode.java new file mode 100644 index 00000000000..4ebf23a9deb --- /dev/null +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/MemoryMode.java @@ -0,0 +1,48 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.common.export; + +/** + * The memory semantics of the SDK. + * + * @since 1.31.0 + */ +public enum MemoryMode { + + /** + * Reuses objects to reduce allocations. + * + *

<p>In this mode, the SDK reuses objects to reduce allocations, at the expense of disallowing + * concurrent collections / exports. + * + *
<p>
Metric Signal: For DELTA aggregation temporality, the memory used for recording and + * aggregating metric values is kept between MetricReader collect operation, to avoid memory + * allocations. When the configured maximum cardinality of Attributes is reached, unused + * Attributes are cleared from memory during collect operation, at the cost of requiring new + * memory allocations the next time those attributes are used. Allocations can be minimized by + * increasing the configured max cardinality. For example, suppose instrumentation has recorded + * values for 1000 unique Attributes while the max cardinality configured was 2000. If after a + * collection only 100 unique Attributes values are recorded, the MetricReader's collect operation + * would return 100 points, while in memory the Attributes data structure keeps 1000 unique + * Attributes. If a user recorded values for 3000 unique attributes, the values for the first 1999 + * Attributes would be recorded, and the rest of 1001 unique Attributes values would be recorded + * in the CARDINALITY_OVERFLOW Attributes. If after several collect operations, the user now + * records values to only 500 unique attributes, during collect operation, the unused 1500 + * Attributes memory would be cleared from memory. + */ + REUSABLE_DATA, + + /** + * Uses immutable data structures. + * + *
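A short sketch of what the two modes imply for code that hands batches to a background thread; the helper is hypothetical, only the `MemoryMode` constants come from this file:

```java
import io.opentelemetry.sdk.common.export.MemoryMode;
import java.util.ArrayList;
import java.util.List;

class MemoryModeHandlingSketch {
  // Hypothetical consumer: copies a borrowed batch before handing it to another thread,
  // because REUSABLE_DATA batches may be overwritten by the next collection.
  static <T> List<T> prepareForAsyncExport(List<T> batch, MemoryMode memoryMode) {
    if (memoryMode == MemoryMode.REUSABLE_DATA) {
      return new ArrayList<>(batch); // defensive copy; the SDK will reuse the original
    }
    return batch; // IMMUTABLE_DATA: safe to keep the reference across exports
  }
}
```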

<p>In this mode, the SDK passes immutable objects to exporters / readers, increasing + * allocations but ensuring safe concurrent exports. + * + *
<p>
Metric Signal: In DELTA aggregation temporality, the memory used for recording and + * aggregating Attributes values is cleared during a MetricReader collect operation. + */ + IMMUTABLE_DATA +} diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/ProxyOptions.java b/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/ProxyOptions.java new file mode 100644 index 00000000000..5e4ad463a92 --- /dev/null +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/ProxyOptions.java @@ -0,0 +1,75 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.common.export; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.net.ProxySelector; +import java.net.SocketAddress; +import java.net.URI; +import java.util.Collections; +import java.util.List; + +/** + * Configuration for proxy settings. + * + * @since 1.36.0 + */ +public final class ProxyOptions { + private final ProxySelector proxySelector; + + private ProxyOptions(ProxySelector proxySelector) { + this.proxySelector = proxySelector; + } + + /** Create proxy options with the {@code proxySelector}. */ + public static ProxyOptions create(ProxySelector proxySelector) { + return new ProxyOptions(proxySelector); + } + + /** + * Create proxy options with a {@link ProxySelector} which always uses an {@link Proxy.Type#HTTP} + * proxy with the {@code socketAddress}. + */ + public static ProxyOptions create(InetSocketAddress socketAddress) { + return new ProxyOptions(new SimpleProxySelector(new Proxy(Proxy.Type.HTTP, socketAddress))); + } + + /** Return the {@link ProxySelector}. */ + public ProxySelector getProxySelector() { + return proxySelector; + } + + @Override + public String toString() { + return "ProxyOptions{proxySelector=" + proxySelector + "}"; + } + + private static final class SimpleProxySelector extends ProxySelector { + + private final List proxyList; + + private SimpleProxySelector(Proxy proxy) { + this.proxyList = Collections.singletonList(proxy); + } + + @Override + public List select(URI uri) { + return proxyList; + } + + @Override + public void connectFailed(URI uri, SocketAddress sa, IOException e) { + // ignore + } + + @Override + public String toString() { + return "SimpleProxySelector{proxy=" + proxyList.get(0).toString() + "}"; + } + } +} diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/RetryPolicy.java b/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/RetryPolicy.java index 1191f80ff35..15fdf2d287e 100644 --- a/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/RetryPolicy.java +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/common/export/RetryPolicy.java @@ -8,7 +8,10 @@ import static io.opentelemetry.api.internal.Utils.checkArgument; import com.google.auto.value.AutoValue; +import java.io.IOException; import java.time.Duration; +import java.util.function.Predicate; +import javax.annotation.Nullable; /** * Configuration for exporter exponential retry policy. @@ -66,6 +69,15 @@ public static RetryPolicyBuilder builder() { /** Returns the backoff multiplier. */ public abstract double getBackoffMultiplier(); + /** + * Returns the predicate used to determine if an attempt which failed exceptionally should be + * retried, or {@code null} if the exporter specific default predicate should be used. + * + * @since 1.47.0 + */ + @Nullable + public abstract Predicate getRetryExceptionPredicate(); + /** Builder for {@link RetryPolicy}. 
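The two new exporter-facing options above, `ProxyOptions` and the retry exception predicate, can be combined in one configuration sketch. The proxy address and predicate are illustrative, and wiring them into a concrete exporter builder is intentionally left out since that API is not part of this hunk (`setRetryExceptionPredicate` appears on the builder just below):

```java
import io.opentelemetry.sdk.common.export.ProxyOptions;
import io.opentelemetry.sdk.common.export.RetryPolicy;
import java.net.InetSocketAddress;
import java.net.SocketTimeoutException;

class ExporterOptionsSketch {
  public static void main(String[] args) {
    // Retry on timeouts in addition to the exporter's default exception handling.
    RetryPolicy retryPolicy =
        RetryPolicy.builder()
            .setMaxAttempts(3)
            .setRetryExceptionPredicate(t -> t instanceof SocketTimeoutException)
            .build();

    // Route exporter traffic through an HTTP proxy.
    ProxyOptions proxyOptions =
        ProxyOptions.create(new InetSocketAddress("proxy.example.com", 8080));

    System.out.println(retryPolicy + " " + proxyOptions);
  }
}
```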
*/ @AutoValue.Builder public abstract static class RetryPolicyBuilder { @@ -96,6 +108,15 @@ public abstract static class RetryPolicyBuilder { */ public abstract RetryPolicyBuilder setBackoffMultiplier(double backoffMultiplier); + /** + * Set the predicate used to determine if an attempt which failed exceptionally should be + * retried. By default, an exporter specific default predicate should be used. + * + * @since 1.47.0 + */ + public abstract RetryPolicyBuilder setRetryExceptionPredicate( + Predicate retryExceptionPredicate); + abstract RetryPolicy autoBuild(); /** Build and return a {@link RetryPolicy} with the values of this builder. */ diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/AttributesMap.java b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/AttributesMap.java index 735d5eb1341..9616eb75fcf 100644 --- a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/AttributesMap.java +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/AttributesMap.java @@ -18,6 +18,14 @@ * A map with a fixed capacity that drops attributes when the map gets full, and which truncates * string and array string attribute values to the {@link #lengthLimit}. * + *
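Ahead of the warning paragraph that follows, a small illustration of the point it makes; the keys and limits are arbitrary, the factory and the overridden `put` are the ones in this diff:

```java
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.sdk.internal.AttributesMap;

class AttributesMapSketch {
  public static void main(String[] args) {
    // Capacity of 2 attributes, string values truncated to 5 characters.
    AttributesMap attributes = AttributesMap.create(2, 5);

    // These go through the limit-enforcing override added in this change.
    attributes.put(AttributeKey.stringKey("color"), "purple"); // stored truncated as "purpl"
    attributes.put(AttributeKey.stringKey("shape"), "square");
    attributes.put(AttributeKey.stringKey("size"), "large"); // dropped: capacity reached

    // HashMap#putAll would bypass these limits, which is exactly the hazard described below.
    System.out.println(attributes.getTotalAddedValues()); // 3 recorded, one dropped for capacity
  }
}
```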

<p>WARNING: In order to reduce memory allocation, this class extends {@link HashMap} when it + * would be more appropriate to delegate. The problem with extending is that we don't enforce that + * all {@link HashMap} methods for reading / writing data conform to the configured attribute + * limits. Therefore, it's easy to accidentally call something like {@link Map#putAll(Map)} or + * {@link Map#put(Object, Object)} and bypass the restrictions (see #7135). Callers MUST + * take care to only call methods from {@link AttributesMap}, and not {@link HashMap}. + *
<p>
This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. */ @@ -44,13 +52,18 @@ public static AttributesMap create(long capacity, int lengthLimit) { return new AttributesMap(capacity, lengthLimit); } - /** Add the attribute key value pair, applying capacity and length limits. */ - public void put(AttributeKey key, T value) { + /** + * Add the attribute key value pair, applying capacity and length limits. Callers MUST ensure the + * {@code value} type matches the type required by {@code key}. + */ + @Override + @Nullable + public Object put(AttributeKey key, Object value) { totalAddedValues++; if (size() >= capacity && !containsKey(key)) { - return; + return null; } - super.put(key, AttributeUtil.applyAttributeLengthLimit(value, lengthLimit)); + return super.put(key, AttributeUtil.applyAttributeLengthLimit(value, lengthLimit)); } /** Get the total number of attributes added, including those dropped for capcity limits. */ diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/DaemonThreadFactory.java b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/DaemonThreadFactory.java index 9e319d8f735..e8f75abe40f 100644 --- a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/DaemonThreadFactory.java +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/DaemonThreadFactory.java @@ -5,6 +5,7 @@ package io.opentelemetry.sdk.internal; +import io.opentelemetry.context.Context; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; @@ -20,14 +21,30 @@ public final class DaemonThreadFactory implements ThreadFactory { private final String namePrefix; private final AtomicInteger counter = new AtomicInteger(); private final ThreadFactory delegate = Executors.defaultThreadFactory(); + private final boolean propagateContextForTesting; public DaemonThreadFactory(String namePrefix) { + this(namePrefix, /* propagateContextForTesting= */ false); + } + + /** + * {@link DaemonThreadFactory}'s constructor. + * + * @param namePrefix Used when setting the new thread's name. + * @param propagateContextForTesting For tests only. When enabled, the current thread's {@link + * Context} will be passed over to the new threads, this is useful for validating scenarios + * where context propagation is available through bytecode instrumentation. + */ + public DaemonThreadFactory(String namePrefix, boolean propagateContextForTesting) { this.namePrefix = namePrefix; + this.propagateContextForTesting = propagateContextForTesting; } @Override public Thread newThread(Runnable runnable) { - Thread t = delegate.newThread(runnable); + Thread t = + delegate.newThread( + propagateContextForTesting ? Context.current().wrap(runnable) : runnable); try { t.setDaemon(true); t.setName(namePrefix + "-" + counter.incrementAndGet()); diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/DynamicPrimitiveLongList.java b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/DynamicPrimitiveLongList.java new file mode 100644 index 00000000000..7dc138ae0b5 --- /dev/null +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/DynamicPrimitiveLongList.java @@ -0,0 +1,150 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.internal; + +import java.util.AbstractList; + +/** + * A resizable list for storing primitive `long` values. + * + *

This class implements a dynamically resizable list specifically for primitive long values. The + * values are stored in a chain of fixed-size arrays (sub-arrays), so the list can grow efficiently by + * adding further sub-arrays as needed. The primitive backing arrays avoid auto-boxing and allow the + * values to be read and written directly as primitives. + * + *

The list is designed to minimize memory allocations by: + *

    + *
  1. Growing by adding sub-arrays rather than allocating a larger array and copying. + *
  2. Keeping existing sub-arrays when the list is resized to a smaller size. + *
+ * + *

Supported {@code List} methods: + * + *

    + *
  • {@link #get(int)} - Retrieves the element at the specified position in this list as a + * {@code Long} object. + *
  • {@link #set(int, Long)} - Replaces the element at the specified position in this list with + * the specified {@code Long} object. + *
  • {@link #size()} - Returns the number of elements in this list. + *
+ * + *

Additional utility methods: + * + *

    + *
  • {@link #getLong(int)} - Retrieves the element at the specified position in this list as a + * primitive long. + *
  • {@link #setLong(int, long)} - Replaces the element at the specified position in this list + * with the specified primitive long element. + *
  • {@link #resizeAndClear(int)} - Resizes the list to the specified size, resetting all + * elements to zero. + *
+ * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *
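For illustration, a minimal usage sketch of the methods listed above:

  DynamicPrimitiveLongList list = DynamicPrimitiveLongList.ofSubArrayCapacity(4);
  list.resizeAndClear(6);            // size() == 6, every element reset to 0
  list.setLong(3, 42L);              // write as a primitive, no boxing
  long primitive = list.getLong(3);  // read as a primitive, no boxing
  Long boxed = list.get(3);          // the plain List view still works, at the cost of boxing

  DynamicPrimitiveLongList other = DynamicPrimitiveLongList.of(1, 2, 3);
  other.resizeAndClear(2);           // shrink: sub-arrays are kept, remaining elements reset to 0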

This class is not thread-safe. + */ +public class DynamicPrimitiveLongList extends AbstractList { + + private static final int DEFAULT_SUBARRAY_CAPACITY = 10; + private final int subarrayCapacity; + private long[][] arrays; + private int size; + private int arrayCount; + + public static DynamicPrimitiveLongList of(long... values) { + DynamicPrimitiveLongList list = new DynamicPrimitiveLongList(); + list.resizeAndClear(values.length); + for (int i = 0; i < values.length; i++) { + list.setLong(i, values[i]); + } + return list; + } + + public static DynamicPrimitiveLongList ofSubArrayCapacity(int subarrayCapacity) { + return new DynamicPrimitiveLongList(subarrayCapacity); + } + + public static DynamicPrimitiveLongList empty() { + return new DynamicPrimitiveLongList(); + } + + DynamicPrimitiveLongList() { + this(DEFAULT_SUBARRAY_CAPACITY); + } + + DynamicPrimitiveLongList(int subarrayCapacity) { + if (subarrayCapacity <= 0) { + throw new IllegalArgumentException("Subarray capacity must be positive"); + } + this.subarrayCapacity = subarrayCapacity; + arrays = new long[0][subarrayCapacity]; + arrayCount = 0; + size = 0; + } + + @Override + public Long get(int index) { + return getLong(index); + } + + public long getLong(int index) { + rangeCheck(index); + return arrays[index / subarrayCapacity][index % subarrayCapacity]; + } + + @Override + public Long set(int index, Long element) { + return setLong(index, element); + } + + public long setLong(int index, long element) { + rangeCheck(index); + long oldValue = arrays[index / subarrayCapacity][index % subarrayCapacity]; + arrays[index / subarrayCapacity][index % subarrayCapacity] = element; + return oldValue; + } + + @Override + public int size() { + return size; + } + + public void resizeAndClear(int newSize) { + if (newSize < 0) { + throw new IllegalArgumentException("New size must be non-negative"); + } + ensureCapacity(newSize); + size = newSize; + for (int i = 0; i < newSize; i++) { + setLong(i, 0); + } + } + + private void ensureCapacity(int minCapacity) { + // A faster way to do ceil(minCapacity/subArrayCapacity) + int requiredArrays = (minCapacity + subarrayCapacity - 1) / subarrayCapacity; + + if (requiredArrays > arrayCount) { + arrays = java.util.Arrays.copyOf(arrays, /* newLength= */ requiredArrays); + for (int i = arrayCount; i < requiredArrays; i++) { + arrays[i] = new long[subarrayCapacity]; + } + arrayCount = requiredArrays; + } + } + + private void rangeCheck(int index) { + if (index < 0 || index >= size) { + throw new IndexOutOfBoundsException(outOfBoundsMsg(index)); + } + } + + private String outOfBoundsMsg(int index) { + return "Index: " + index + ", Size: " + size; + } +} diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/GlobUtil.java b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/GlobUtil.java new file mode 100644 index 00000000000..9c914055d7d --- /dev/null +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/GlobUtil.java @@ -0,0 +1,82 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.internal; + +import java.util.function.Predicate; +import java.util.regex.Pattern; + +/** + * Utilities for glob pattern matching. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class GlobUtil { + + private GlobUtil() {} + + /** + * Return a predicate that returns {@code true} if a string matches the {@code globPattern}. + * + *

{@code globPattern} may contain the wildcard characters {@code *} and {@code ?} with the + * following matching criteria: + * + *

    + *
  • {@code *} matches 0 or more instances of any character + *
  • {@code ?} matches exactly one instance of any character + *
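For illustration, the resulting predicate behaves as follows (these cases mirror the tests added further down in this change; assumes java.util.function.Predicate):

  Predicate<String> pattern = GlobUtil.toGlobPatternPredicate("fo?");
  pattern.test("foo");   // true  -- '?' matches exactly one character
  pattern.test("fooo");  // false -- one character too many
  GlobUtil.toGlobPatternPredicate("fo*").test("fo");   // true -- '*' may match zero characters
  GlobUtil.toGlobPatternPredicate("foo").test("Foo");  // true -- exact matches ignore case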
+ */ + public static Predicate toGlobPatternPredicate(String globPattern) { + // Match all + if (globPattern.equals("*")) { + return unused -> true; + } + + // If globPattern contains '*' or '?', convert it to a regex and return corresponding predicate + for (int i = 0; i < globPattern.length(); i++) { + char c = globPattern.charAt(i); + if (c == '*' || c == '?') { + Pattern pattern = toRegexPattern(globPattern); + return string -> pattern.matcher(string).matches(); + } + } + + // Exact match, ignoring case + return globPattern::equalsIgnoreCase; + } + + /** + * Transform the {@code globPattern} to a regex by converting {@code *} to {@code .*}, {@code ?} + * to {@code .}, and escaping other regex special characters. + */ + private static Pattern toRegexPattern(String globPattern) { + int tokenStart = -1; + StringBuilder patternBuilder = new StringBuilder(); + for (int i = 0; i < globPattern.length(); i++) { + char c = globPattern.charAt(i); + if (c == '*' || c == '?') { + if (tokenStart != -1) { + patternBuilder.append(Pattern.quote(globPattern.substring(tokenStart, i))); + tokenStart = -1; + } + if (c == '*') { + patternBuilder.append(".*"); + } else { + // c == '?' + patternBuilder.append("."); + } + } else { + if (tokenStart == -1) { + tokenStart = i; + } + } + } + if (tokenStart != -1) { + patternBuilder.append(Pattern.quote(globPattern.substring(tokenStart))); + } + return Pattern.compile(patternBuilder.toString()); + } +} diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ScopeConfigurator.java b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ScopeConfigurator.java new file mode 100644 index 00000000000..cb1fdaeed43 --- /dev/null +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ScopeConfigurator.java @@ -0,0 +1,33 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.internal; + +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import java.util.function.Function; + +/** + * A {@link ScopeConfigurator} computes configuration for a given {@link InstrumentationScopeInfo}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@FunctionalInterface +public interface ScopeConfigurator extends Function { + + /** Create a new builder. */ + static ScopeConfiguratorBuilder builder() { + return new ScopeConfiguratorBuilder<>(unused -> null); + } + + /** + * Convert this {@link ScopeConfigurator} to a builder. Additional added matchers only apply when + * {@link #apply(Object)} returns {@code null}. If this configurator contains {@link + * ScopeConfiguratorBuilder#setDefault(Object)}, additional matchers are never applied. + */ + default ScopeConfiguratorBuilder toBuilder() { + return new ScopeConfiguratorBuilder<>(this); + } +} diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ScopeConfiguratorBuilder.java b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ScopeConfiguratorBuilder.java new file mode 100644 index 00000000000..4c32e2f8d9a --- /dev/null +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ScopeConfiguratorBuilder.java @@ -0,0 +1,114 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.internal; + +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; +import javax.annotation.Nullable; + +/** + * Builder for {@link ScopeConfigurator}. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + * @param The scope configuration object, e.g. {@code TracerConfig}, {@code LoggerConfig}, + * {@code MeterConfig}. + */ +public final class ScopeConfiguratorBuilder { + + private final ScopeConfigurator baseScopeConfigurator; + @Nullable private T defaultScopeConfig; + private final List> conditions = new ArrayList<>(); + + ScopeConfiguratorBuilder(ScopeConfigurator baseScopeConfigurator) { + this.baseScopeConfigurator = baseScopeConfigurator; + } + + /** + * Set the default scope config, which is returned by {@link ScopeConfigurator#apply(Object)} if a + * {@link InstrumentationScopeInfo} does not match any {@link #addCondition(Predicate, Object) + * conditions}. If a default is not set, an SDK defined default is used. + */ + public ScopeConfiguratorBuilder setDefault(T defaultScopeConfig) { + this.defaultScopeConfig = defaultScopeConfig; + return this; + } + + /** + * Add a condition. Conditions are evaluated in order. The {@code scopeConfig} for the first match + * is returned by {@link ScopeConfigurator#apply(Object)}. + * + * @param scopePredicate predicate that {@link InstrumentationScopeInfo}s are evaluated against + * @param scopeConfig the scope config to use when this condition is the first matching {@code + * scopePredicate} + * @see #nameMatchesGlob(String) + * @see #nameEquals(String) + */ + public ScopeConfiguratorBuilder addCondition( + Predicate scopePredicate, T scopeConfig) { + conditions.add(new Condition<>(scopePredicate, scopeConfig)); + return this; + } + + /** + * Helper function for pattern matching {@link InstrumentationScopeInfo#getName()} against the + * {@code globPattern}. + * + *

{@code globPattern} may contain the wildcard characters {@code *} and {@code ?} with the + * following matching criteria: + * + *

    + *
  • {@code *} matches 0 or more instances of any character + *
  • {@code ?} matches exactly one instance of any character + *
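For illustration, a sketch of how a configurator might be assembled and resolved (Boolean stands in here for a real scope-config type such as {@code LoggerConfig}):

  ScopeConfigurator<Boolean> configurator =
      ScopeConfigurator.<Boolean>builder()
          .addCondition(ScopeConfiguratorBuilder.nameEquals("io.example.noisy"), false)
          .addCondition(ScopeConfiguratorBuilder.nameMatchesGlob("io.example.*"), true)
          .setDefault(false)
          .build();
  // Conditions are evaluated in order; the first match wins, otherwise the default applies.
  configurator.apply(InstrumentationScopeInfo.create("io.example.noisy"));  // false (first condition)
  configurator.apply(InstrumentationScopeInfo.create("io.example.other"));  // true  (glob condition)
  configurator.apply(InstrumentationScopeInfo.create("something.else"));    // false (default)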
+ * + * @see #addCondition(Predicate, Object) + */ + public static Predicate nameMatchesGlob(String globPattern) { + Predicate globPredicate = GlobUtil.toGlobPatternPredicate(globPattern); + return scopeInfo -> globPredicate.test(scopeInfo.getName()); + } + + /** + * Helper function for exact matching {@link InstrumentationScopeInfo#getName()} against the + * {@code scopeName}. + * + * @see #addCondition(Predicate, Object) + */ + public static Predicate nameEquals(String scopeName) { + return scopeInfo -> scopeInfo.getName().equals(scopeName); + } + + /** Build a {@link ScopeConfigurator} with the configuration of this builder. */ + public ScopeConfigurator build() { + // TODO: return an instance with toString implementation which self describes rules + return scopeInfo -> { + T scopeConfig = baseScopeConfigurator.apply(scopeInfo); + if (scopeConfig != null) { + return scopeConfig; + } + for (Condition condition : conditions) { + if (condition.scopeMatcher.test(scopeInfo)) { + return condition.scopeConfig; + } + } + return defaultScopeConfig; + }; + } + + private static final class Condition { + private final Predicate scopeMatcher; + private final T scopeConfig; + + private Condition(Predicate scopeMatcher, T scopeConfig) { + this.scopeMatcher = scopeMatcher; + this.scopeConfig = scopeConfig; + } + } +} diff --git a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ThrottlingLogger.java b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ThrottlingLogger.java index d4bd64d1af8..e2c671cdff9 100644 --- a/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ThrottlingLogger.java +++ b/sdk/common/src/main/java/io/opentelemetry/sdk/internal/ThrottlingLogger.java @@ -41,7 +41,8 @@ public ThrottlingLogger(Logger delegate) { this.fastRateLimiter = new RateLimiter(RATE_LIMIT / rateTimeUnit.toSeconds(1), RATE_LIMIT, clock); this.throttledRateLimiter = - new RateLimiter(RATE_LIMIT / rateTimeUnit.toSeconds(1), THROTTLED_RATE_LIMIT, clock); + new RateLimiter( + THROTTLED_RATE_LIMIT / rateTimeUnit.toSeconds(1), THROTTLED_RATE_LIMIT, clock); } /** Log a message at the given level. 
*/ diff --git a/sdk/common/src/test/java/io/opentelemetry/sdk/common/CompletableResultCodeTest.java b/sdk/common/src/test/java/io/opentelemetry/sdk/common/CompletableResultCodeTest.java index 544777430f5..b83e3e03ec7 100644 --- a/sdk/common/src/test/java/io/opentelemetry/sdk/common/CompletableResultCodeTest.java +++ b/sdk/common/src/test/java/io/opentelemetry/sdk/common/CompletableResultCodeTest.java @@ -21,12 +21,32 @@ class CompletableResultCodeTest { @Test void ofSuccess() { - assertThat(CompletableResultCode.ofSuccess().isSuccess()).isTrue(); + assertThat(CompletableResultCode.ofSuccess()) + .satisfies( + code -> { + assertThat(code.isSuccess()).isTrue(); + assertThat(code.getFailureThrowable()).isNull(); + }); } @Test void ofFailure() { - assertThat(CompletableResultCode.ofFailure().isSuccess()).isFalse(); + assertThat(CompletableResultCode.ofFailure()) + .satisfies( + code -> { + assertThat(code.isSuccess()).isFalse(); + assertThat(code.getFailureThrowable()).isNull(); + }); + } + + @Test + void ofExceptionalFailure() { + assertThat(CompletableResultCode.ofExceptionalFailure(new Exception("error"))) + .satisfies( + code -> { + assertThat(code.isSuccess()).isFalse(); + assertThat(code.getFailureThrowable()).hasMessage("error"); + }); } @Test @@ -59,6 +79,15 @@ void fail() throws InterruptedException { assertThat(resultCode.isSuccess()).isFalse(); } + @Test + void failExceptionallyWithNull() { + CompletableResultCode resultCode = new CompletableResultCode(); + CompletableResultCode result = resultCode.failExceptionally(null); + assertThat(result.isDone()).isTrue(); + assertThat(result.isSuccess()).isFalse(); + assertThat(result.getFailureThrowable()).isNull(); + } + @Test void whenDoublyCompleteSuccessfully() throws InterruptedException { CompletableResultCode resultCode = new CompletableResultCode(); @@ -149,6 +178,24 @@ void ofAllWithFailure() { .isFalse(); } + @Test + void ofAllWithExceptionalFailure() { + assertThat( + CompletableResultCode.ofAll( + Arrays.asList( + CompletableResultCode.ofSuccess(), + CompletableResultCode.ofFailure(), + CompletableResultCode.ofExceptionalFailure(new Exception("error1")), + CompletableResultCode.ofExceptionalFailure(new Exception("error2")), + CompletableResultCode.ofSuccess()))) + .satisfies( + code -> { + assertThat(code.isSuccess()).isFalse(); + // failure throwable is set to first throwable seen in the collection + assertThat(code.getFailureThrowable()).hasMessage("error1"); + }); + } + @Test void join() { CompletableResultCode result = new CompletableResultCode(); diff --git a/sdk/common/src/test/java/io/opentelemetry/sdk/common/RetryPolicyTest.java b/sdk/common/src/test/java/io/opentelemetry/sdk/common/RetryPolicyTest.java index 7f63a8dc9df..e0f56ff58a5 100644 --- a/sdk/common/src/test/java/io/opentelemetry/sdk/common/RetryPolicyTest.java +++ b/sdk/common/src/test/java/io/opentelemetry/sdk/common/RetryPolicyTest.java @@ -40,6 +40,7 @@ void build() { assertThat(retryPolicy.getInitialBackoff()).isEqualTo(Duration.ofMillis(2)); assertThat(retryPolicy.getMaxBackoff()).isEqualTo(Duration.ofSeconds(1)); assertThat(retryPolicy.getBackoffMultiplier()).isEqualTo(1.1); + assertThat(retryPolicy.getRetryExceptionPredicate()).isEqualTo(null); } @Test diff --git a/sdk/common/src/test/java/io/opentelemetry/sdk/internal/DynamicPrimitiveLongListTest.java b/sdk/common/src/test/java/io/opentelemetry/sdk/internal/DynamicPrimitiveLongListTest.java new file mode 100644 index 00000000000..a5f7bf50f2a --- /dev/null +++ 
b/sdk/common/src/test/java/io/opentelemetry/sdk/internal/DynamicPrimitiveLongListTest.java @@ -0,0 +1,132 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.internal; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import org.junit.jupiter.api.Test; + +class DynamicPrimitiveLongListTest { + + @Test + void subArrayCapacityMustBePositive() { + assertThatThrownBy( + () -> { + int subArrayCapacity = 0; + new DynamicPrimitiveLongList(subArrayCapacity); + }) + .isInstanceOf(IllegalArgumentException.class); + + assertThatThrownBy( + () -> { + int subArrayCapacity = -2; + new DynamicPrimitiveLongList(subArrayCapacity); + }) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + void newListIsEmpty() { + DynamicPrimitiveLongList list = new DynamicPrimitiveLongList(); + assertThat(list).isEmpty(); + assertThatThrownBy(() -> list.getLong(0)).isInstanceOf(IndexOutOfBoundsException.class); + } + + @Test + void resizeListAndSetElement() { + DynamicPrimitiveLongList list = new DynamicPrimitiveLongList(); + list.resizeAndClear(5); + list.setLong(3, 10L); + + for (int i = 0; i < 5; i++) { + if (i == 3) { + assertThat(list.getLong(i)).isEqualTo(10L); + } else { + assertThat(list.getLong(i)).isEqualTo(0L); + } + } + } + + @Test + void resizeAndFillThenResizeSmallerAndCheck() { + DynamicPrimitiveLongList list = new DynamicPrimitiveLongList(); + list.resizeAndClear(6); + + for (int i = 0; i < 6; i++) { + list.setLong(i, i + 1); + } + + list.resizeAndClear(3); + + for (int i = 0; i < 3; i++) { + assertThat(list.getLong(i)).isEqualTo(0L); + } + + assertThatThrownBy(() -> list.getLong(4)).isInstanceOf(IndexOutOfBoundsException.class); + + for (int i = 0; i < 3; i++) { + list.setLong(i, i + 10); + assertThat(list.getLong(i)).isEqualTo(i + 10); + } + } + + @Test + void resizeToNegativeNumber() { + assertThatThrownBy(() -> DynamicPrimitiveLongList.of(0, 10, 20).resizeAndClear(-2)) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + void resizeAndFillThenResizeLargerAndCheck() { + DynamicPrimitiveLongList list = new DynamicPrimitiveLongList(); + list.resizeAndClear(6); + + for (int i = 0; i < 6; i++) { + list.setLong(i, i + 1); + } + + list.resizeAndClear(8); + + for (int i = 0; i < 8; i++) { + assertThat(list.getLong(i)).isEqualTo(0L); + } + + assertThatThrownBy(() -> list.getLong(8)).isInstanceOf(IndexOutOfBoundsException.class); + } + + @Test + void of() { + DynamicPrimitiveLongList list = DynamicPrimitiveLongList.of(1, 4, 5, 6); + assertThat(list.getLong(0)).isEqualTo(1); + assertThat(list.getLong(1)).isEqualTo(4); + assertThat(list.getLong(2)).isEqualTo(5); + assertThat(list.getLong(3)).isEqualTo(6); + + list = DynamicPrimitiveLongList.of(); + assertThat(list).isEmpty(); + } + + @Test + void empty() { + DynamicPrimitiveLongList list = DynamicPrimitiveLongList.empty(); + assertThat(list).isEmpty(); + + // I can still add elements + list.resizeAndClear(1); + list.set(0, 10L); + assertThat(list.getLong(0)).isEqualTo(10L); + } + + @Test + void set() { + DynamicPrimitiveLongList list = DynamicPrimitiveLongList.of(0, 10, 20); + assertThat(list.get(1)).isEqualTo(10L); + + list.set(1, 100L); + assertThat(list.get(1)).isEqualTo(100L); + } +} diff --git a/sdk/common/src/test/java/io/opentelemetry/sdk/internal/GlobUtilTest.java b/sdk/common/src/test/java/io/opentelemetry/sdk/internal/GlobUtilTest.java new file mode 100644 index 
00000000000..a26928f34bc --- /dev/null +++ b/sdk/common/src/test/java/io/opentelemetry/sdk/internal/GlobUtilTest.java @@ -0,0 +1,46 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.internal; + +import static io.opentelemetry.sdk.internal.GlobUtil.toGlobPatternPredicate; +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Test; + +class GlobUtilTest { + + @Test + void matchesName() { + assertThat(toGlobPatternPredicate("foo").test("foo")).isTrue(); + assertThat(toGlobPatternPredicate("foo").test("Foo")).isTrue(); + assertThat(toGlobPatternPredicate("foo").test("bar")).isFalse(); + assertThat(toGlobPatternPredicate("fo?").test("foo")).isTrue(); + assertThat(toGlobPatternPredicate("fo??").test("fooo")).isTrue(); + assertThat(toGlobPatternPredicate("fo?").test("fob")).isTrue(); + assertThat(toGlobPatternPredicate("fo?").test("fooo")).isFalse(); + assertThat(toGlobPatternPredicate("*").test("foo")).isTrue(); + assertThat(toGlobPatternPredicate("*").test("bar")).isTrue(); + assertThat(toGlobPatternPredicate("*").test("baz")).isTrue(); + assertThat(toGlobPatternPredicate("*").test("foo.bar.baz")).isTrue(); + assertThat(toGlobPatternPredicate("*").test(null)).isTrue(); + assertThat(toGlobPatternPredicate("*").test("")).isTrue(); + assertThat(toGlobPatternPredicate("fo*").test("fo")).isTrue(); + assertThat(toGlobPatternPredicate("fo*").test("foo")).isTrue(); + assertThat(toGlobPatternPredicate("fo*").test("fooo")).isTrue(); + assertThat(toGlobPatternPredicate("fo*").test("foo.bar.baz")).isTrue(); + assertThat(toGlobPatternPredicate("*bar").test("sandbar")).isTrue(); + assertThat(toGlobPatternPredicate("fo*b*").test("foobar")).isTrue(); + assertThat(toGlobPatternPredicate("fo*b*").test("foob")).isTrue(); + assertThat(toGlobPatternPredicate("fo*b*").test("foo bar")).isTrue(); + assertThat(toGlobPatternPredicate("fo? b??").test("foo bar")).isTrue(); + assertThat(toGlobPatternPredicate("fo? b??").test("fooo bar")).isFalse(); + assertThat(toGlobPatternPredicate("fo* ba?").test("foo is not bar")).isTrue(); + assertThat(toGlobPatternPredicate("fo? 
b*").test("fox beetles for lunch")).isTrue(); + assertThat(toGlobPatternPredicate("f()[]$^.{}|").test("f()[]$^.{}|")).isTrue(); + assertThat(toGlobPatternPredicate("f()[]$^.{}|?").test("f()[]$^.{}|o")).isTrue(); + assertThat(toGlobPatternPredicate("f()[]$^.{}|*").test("f()[]$^.{}|ooo")).isTrue(); + } +} diff --git a/sdk/common/src/test/java/io/opentelemetry/sdk/internal/ThrottlingLoggerTest.java b/sdk/common/src/test/java/io/opentelemetry/sdk/internal/ThrottlingLoggerTest.java index 1c35b23ba0f..624c0fc5fec 100644 --- a/sdk/common/src/test/java/io/opentelemetry/sdk/internal/ThrottlingLoggerTest.java +++ b/sdk/common/src/test/java/io/opentelemetry/sdk/internal/ThrottlingLoggerTest.java @@ -144,4 +144,41 @@ void afterAMinuteLetOneThrough() { assertThat(logs.getEvents()).hasSize(9); assertThat(logs.getEvents().get(8).getMessage()).isEqualTo("oh no!"); } + + @Test + void allowOnlyOneLogPerMinuteAfterSuppression() { + TestClock clock = TestClock.create(); + ThrottlingLogger logger = new ThrottlingLogger(realLogger, clock); + + logger.log(Level.WARNING, "oh no!"); + logger.log(Level.WARNING, "oh no!"); + logger.log(Level.WARNING, "oh no!"); + logger.log(Level.WARNING, "oh no!"); + logger.log(Level.WARNING, "oh no!"); + + logger.log(Level.WARNING, "oh no I should trigger suppression!"); + logger.log(Level.WARNING, "oh no I should be suppressed!"); + + assertThat(logs.getEvents()).hasSize(7); + + clock.advance(Duration.ofMillis(12_001)); + logger.log(Level.WARNING, "suppression 1"); + clock.advance(Duration.ofMillis(12_001)); + logger.log(Level.WARNING, "suppression 2"); + clock.advance(Duration.ofMillis(12_001)); + logger.log(Level.WARNING, "suppression 3"); + clock.advance(Duration.ofMillis(12_001)); + logger.log(Level.WARNING, "suppression 4"); + clock.advance(Duration.ofMillis(12_001)); + logger.log(Level.WARNING, "allowed 1"); + + logs.assertDoesNotContain("suppression 1"); + logs.assertDoesNotContain("suppression 2"); + logs.assertDoesNotContain("suppression 3"); + logs.assertDoesNotContain("suppression 4"); + logs.assertContains("allowed 1"); + + assertThat(logs.getEvents()).hasSize(8); + assertThat(logs.getEvents().get(7).getMessage()).isEqualTo("allowed 1"); + } } diff --git a/sdk/logs/build.gradle.kts b/sdk/logs/build.gradle.kts index 8ffe760b86f..c0c740d3743 100644 --- a/sdk/logs/build.gradle.kts +++ b/sdk/logs/build.gradle.kts @@ -12,12 +12,30 @@ otelJava.moduleName.set("io.opentelemetry.sdk.logs") dependencies { api(project(":api:all")) api(project(":sdk:common")) - - implementation(project(":api:events")) + compileOnly(project(":api:incubator")) annotationProcessor("com.google.auto.value:auto-value") testImplementation(project(":sdk:testing")) testImplementation("org.awaitility:awaitility") + testImplementation("com.google.guava:guava") +} + +testing { + suites { + register("testIncubating") { + dependencies { + implementation(project(":sdk:testing")) + implementation(project(":api:incubator")) + implementation("com.google.guava:guava") + } + } + } +} + +tasks { + check { + dependsOn(testing.suites) + } } diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ExtendedSdkLogRecordBuilder.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ExtendedSdkLogRecordBuilder.java new file mode 100644 index 00000000000..9f874d1f4b3 --- /dev/null +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ExtendedSdkLogRecordBuilder.java @@ -0,0 +1,91 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.logs; + +import 
io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.incubator.logs.ExtendedLogRecordBuilder; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import java.time.Instant; +import java.util.concurrent.TimeUnit; + +/** SDK implementation of {@link ExtendedLogRecordBuilder}. */ +final class ExtendedSdkLogRecordBuilder extends SdkLogRecordBuilder + implements ExtendedLogRecordBuilder { + + ExtendedSdkLogRecordBuilder( + LoggerSharedState loggerSharedState, InstrumentationScopeInfo instrumentationScopeInfo) { + super(loggerSharedState, instrumentationScopeInfo); + } + + @Override + public ExtendedSdkLogRecordBuilder setEventName(String eventName) { + super.setEventName(eventName); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setTimestamp(long timestamp, TimeUnit unit) { + super.setTimestamp(timestamp, unit); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setTimestamp(Instant instant) { + super.setTimestamp(instant); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setObservedTimestamp(long timestamp, TimeUnit unit) { + super.setObservedTimestamp(timestamp, unit); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setObservedTimestamp(Instant instant) { + super.setObservedTimestamp(instant); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setContext(Context context) { + super.setContext(context); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setSeverity(Severity severity) { + super.setSeverity(severity); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setSeverityText(String severityText) { + super.setSeverityText(severityText); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setBody(String body) { + super.setBody(body); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setBody(Value value) { + super.setBody(value); + return this; + } + + @Override + public ExtendedSdkLogRecordBuilder setAttribute(AttributeKey key, T value) { + super.setAttribute(key, value); + return this; + } +} diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ExtendedSdkLogger.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ExtendedSdkLogger.java new file mode 100644 index 00000000000..e3ab57a9711 --- /dev/null +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ExtendedSdkLogger.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.logs; + +import io.opentelemetry.api.incubator.logs.ExtendedLogRecordBuilder; +import io.opentelemetry.api.incubator.logs.ExtendedLogger; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.internal.LoggerConfig; + +/** SDK implementation of {@link ExtendedLogger}. 
*/ +final class ExtendedSdkLogger extends SdkLogger implements ExtendedLogger { + + private final boolean loggerEnabled; + + ExtendedSdkLogger( + LoggerSharedState loggerSharedState, + InstrumentationScopeInfo instrumentationScopeInfo, + LoggerConfig loggerConfig) { + super(loggerSharedState, instrumentationScopeInfo, loggerConfig); + this.loggerEnabled = loggerConfig.isEnabled(); + } + + @Override + public boolean isEnabled() { + return loggerEnabled; + } + + @Override + public ExtendedLogRecordBuilder logRecordBuilder() { + return (ExtendedLogRecordBuilder) super.logRecordBuilder(); + } +} diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/IncubatingUtil.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/IncubatingUtil.java new file mode 100644 index 00000000000..fd7582644be --- /dev/null +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/IncubatingUtil.java @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.logs; + +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.internal.LoggerConfig; + +/** + * Utilities for interacting with {@code io.opentelemetry:opentelemetry-api-incubator}, which is not + * guaranteed to be present on the classpath. For all methods, callers MUST first separately + * reflectively confirm that the incubator is available on the classpath. + */ +final class IncubatingUtil { + + private IncubatingUtil() {} + + static SdkLogger createExtendedLogger( + LoggerSharedState sharedState, + InstrumentationScopeInfo instrumentationScopeInfo, + LoggerConfig tracerConfig) { + return new ExtendedSdkLogger(sharedState, instrumentationScopeInfo, tracerConfig); + } + + static SdkLogRecordBuilder createExtendedLogRecordBuilder( + LoggerSharedState loggerSharedState, InstrumentationScopeInfo instrumentationScopeInfo) { + return new ExtendedSdkLogRecordBuilder(loggerSharedState, instrumentationScopeInfo); + } +} diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/LogLimitsBuilder.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/LogLimitsBuilder.java index 22650d4d896..f4f74bf5ac3 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/LogLimitsBuilder.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/LogLimitsBuilder.java @@ -32,7 +32,7 @@ public final class LogLimitsBuilder { * @throws IllegalArgumentException if {@code maxNumberOfAttributes} is not positive. 
*/ public LogLimitsBuilder setMaxNumberOfAttributes(int maxNumberOfAttributes) { - Utils.checkArgument(maxNumberOfAttributes > 0, "maxNumberOfAttributes must be greater than 0"); + Utils.checkArgument(maxNumberOfAttributes >= 0, "maxNumberOfAttributes must be non-negative"); this.maxNumAttributes = maxNumberOfAttributes; return this; } @@ -48,7 +48,7 @@ public LogLimitsBuilder setMaxNumberOfAttributes(int maxNumberOfAttributes) { */ public LogLimitsBuilder setMaxAttributeValueLength(int maxAttributeValueLength) { Utils.checkArgument( - maxAttributeValueLength > -1, "maxAttributeValueLength must be non-negative"); + maxAttributeValueLength >= 0, "maxAttributeValueLength must be non-negative"); this.maxAttributeValueLength = maxAttributeValueLength; return this; } diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ReadWriteLogRecord.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ReadWriteLogRecord.java index 8f6701c3adf..8b69483d2a8 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ReadWriteLogRecord.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/ReadWriteLogRecord.java @@ -6,7 +6,13 @@ package io.opentelemetry.sdk.logs; import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.logs.data.LogRecordData; +import javax.annotation.Nullable; /** * A log record that can be read from and written to. @@ -25,10 +31,111 @@ public interface ReadWriteLogRecord { // TODO: add additional setters + /** + * Sets attributes to the {@link ReadWriteLogRecord}. If the {@link ReadWriteLogRecord} previously + * contained a mapping for any of the keys, the old values are replaced by the specified values. + * + * @param attributes the attributes + * @return this. + * @since 1.31.0 + */ + @SuppressWarnings("unchecked") + default ReadWriteLogRecord setAllAttributes(Attributes attributes) { + if (attributes == null || attributes.isEmpty()) { + return this; + } + attributes.forEach( + (attributeKey, value) -> this.setAttribute((AttributeKey) attributeKey, value)); + return this; + } + /** Return an immutable {@link LogRecordData} instance representing this log record. */ LogRecordData toLogRecordData(); - // TODO: add additional log record accessors. Currently, all fields can be accessed indirectly via - // #toLogRecordData() at the expense of additional allocations. + // TODO (trask) once event name stabilizes, add getEventName() + + /** + * Returns the value of a given attribute if it exists. This is the equivalent of calling {@code + * getAttributes().get(key)}. + * + * @since 1.46.0 + */ + @Nullable + default T getAttribute(AttributeKey key) { + return toLogRecordData().getAttributes().get(key); + } + + /** + * Returns the instrumentation scope that generated this log. + * + * @since 1.46.0 + */ + default InstrumentationScopeInfo getInstrumentationScopeInfo() { + return toLogRecordData().getInstrumentationScopeInfo(); + } + + /** + * Returns the timestamp at which the log record occurred, in epoch nanos. + * + * @since 1.46.0 + */ + default long getTimestampEpochNanos() { + return toLogRecordData().getTimestampEpochNanos(); + } + + /** + * Returns the timestamp at which the log record was observed, in epoch nanos. 
+ * + * @since 1.46.0 + */ + default long getObservedTimestampEpochNanos() { + return toLogRecordData().getTimestampEpochNanos(); + } + /** + * Return the span context for this log, or {@link SpanContext#getInvalid()} if unset. + * + * @since 1.46.0 + */ + default SpanContext getSpanContext() { + return toLogRecordData().getSpanContext(); + } + + /** + * Returns the severity for this log, or {@link Severity#UNDEFINED_SEVERITY_NUMBER} if unset. + * + * @since 1.46.0 + */ + default Severity getSeverity() { + return toLogRecordData().getSeverity(); + } + + /** + * Returns the severity text for this log, or null if unset. + * + * @since 1.46.0 + */ + @Nullable + default String getSeverityText() { + return toLogRecordData().getSeverityText(); + } + + /** + * Returns the {@link Value} representation of the log body, of null if unset. + * + * @since 1.46.0 + */ + @Nullable + default Value getBodyValue() { + return toLogRecordData().getBodyValue(); + } + + /** + * Returns the attributes for this log, or {@link Attributes#empty()} if unset. + * + * @since 1.46.0 + */ + default Attributes getAttributes() { + return toLogRecordData().getAttributes(); + } } diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogRecordBuilder.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogRecordBuilder.java index 8ef8b2ae4b2..da7e1f45e7e 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogRecordBuilder.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogRecordBuilder.java @@ -6,30 +6,31 @@ package io.opentelemetry.sdk.logs; import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Value; import io.opentelemetry.api.logs.LogRecordBuilder; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.trace.Span; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.internal.AttributesMap; -import io.opentelemetry.sdk.logs.data.Body; import java.time.Instant; import java.util.concurrent.TimeUnit; import javax.annotation.Nullable; /** SDK implementation of {@link LogRecordBuilder}. 
*/ -final class SdkLogRecordBuilder implements LogRecordBuilder { +class SdkLogRecordBuilder implements LogRecordBuilder { private final LoggerSharedState loggerSharedState; private final LogLimits logLimits; private final InstrumentationScopeInfo instrumentationScopeInfo; + @Nullable private String eventName; private long timestampEpochNanos; private long observedTimestampEpochNanos; @Nullable private Context context; private Severity severity = Severity.UNDEFINED_SEVERITY_NUMBER; @Nullable private String severityText; - private Body body = Body.empty(); + @Nullable private Value body; @Nullable private AttributesMap attributes; SdkLogRecordBuilder( @@ -39,6 +40,12 @@ final class SdkLogRecordBuilder implements LogRecordBuilder { this.instrumentationScopeInfo = instrumentationScopeInfo; } + // accessible via ExtendedSdkLogRecordBuilder + SdkLogRecordBuilder setEventName(String eventName) { + this.eventName = eventName; + return this; + } + @Override public SdkLogRecordBuilder setTimestamp(long timestamp, TimeUnit unit) { this.timestampEpochNanos = unit.toNanos(timestamp); @@ -85,7 +92,12 @@ public SdkLogRecordBuilder setSeverityText(String severityText) { @Override public SdkLogRecordBuilder setBody(String body) { - this.body = Body.string(body); + return setBody(Value.of(body)); + } + + @Override + public SdkLogRecordBuilder setBody(Value value) { + this.body = value; return this; } @@ -121,6 +133,7 @@ public void emit() { loggerSharedState.getLogLimits(), loggerSharedState.getResource(), instrumentationScopeInfo, + eventName, timestampEpochNanos, observedTimestampEpochNanos, Span.fromContext(context).getSpanContext(), diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogRecordData.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogRecordData.java index dd77c488f89..eb8e966a7fd 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogRecordData.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogRecordData.java @@ -7,11 +7,11 @@ import com.google.auto.value.AutoValue; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.logs.data.Body; -import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.data.internal.ExtendedLogRecordData; import io.opentelemetry.sdk.resources.Resource; import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; @@ -19,19 +19,20 @@ @AutoValue @AutoValue.CopyAnnotations @Immutable -abstract class SdkLogRecordData implements LogRecordData { +abstract class SdkLogRecordData implements ExtendedLogRecordData { SdkLogRecordData() {} static SdkLogRecordData create( Resource resource, InstrumentationScopeInfo instrumentationScopeInfo, + @Nullable String eventName, long epochNanos, long observedEpochNanos, SpanContext spanContext, Severity severity, @Nullable String severityText, - Body body, + @Nullable Value body, Attributes attributes, int totalAttributeCount) { return new AutoValue_SdkLogRecordData( @@ -42,8 +43,26 @@ static SdkLogRecordData create( spanContext, severity, severityText, - body, attributes, - totalAttributeCount); + totalAttributeCount, + body, + eventName); + } + + @Override + @Nullable + public abstract Value getBodyValue(); + + @Override + @Nullable + public abstract String getEventName(); + + @Override + @SuppressWarnings("deprecation") // 
Implementation of deprecated method + public io.opentelemetry.sdk.logs.data.Body getBody() { + Value valueBody = getBodyValue(); + return valueBody == null + ? io.opentelemetry.sdk.logs.data.Body.empty() + : io.opentelemetry.sdk.logs.data.Body.string(valueBody.asString()); } } diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogger.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogger.java index efcb7882ade..8ac51603073 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogger.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLogger.java @@ -7,23 +7,58 @@ import io.opentelemetry.api.logs.LogRecordBuilder; import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.api.logs.LoggerProvider; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.internal.LoggerConfig; /** SDK implementation of {@link Logger}. */ -final class SdkLogger implements Logger { +class SdkLogger implements Logger { + + private static final Logger NOOP_LOGGER = LoggerProvider.noop().get("noop"); + private static final boolean INCUBATOR_AVAILABLE; + + static { + boolean incubatorAvailable = false; + try { + Class.forName("io.opentelemetry.api.incubator.logs.ExtendedDefaultLoggerProvider"); + incubatorAvailable = true; + } catch (ClassNotFoundException e) { + // Not available + } + INCUBATOR_AVAILABLE = incubatorAvailable; + } private final LoggerSharedState loggerSharedState; private final InstrumentationScopeInfo instrumentationScopeInfo; + private final boolean loggerEnabled; SdkLogger( - LoggerSharedState loggerSharedState, InstrumentationScopeInfo instrumentationScopeInfo) { + LoggerSharedState loggerSharedState, + InstrumentationScopeInfo instrumentationScopeInfo, + LoggerConfig loggerConfig) { this.loggerSharedState = loggerSharedState; this.instrumentationScopeInfo = instrumentationScopeInfo; + this.loggerEnabled = loggerConfig.isEnabled(); + } + + static SdkLogger create( + LoggerSharedState sharedState, + InstrumentationScopeInfo instrumentationScopeInfo, + LoggerConfig loggerConfig) { + return INCUBATOR_AVAILABLE + ? IncubatingUtil.createExtendedLogger(sharedState, instrumentationScopeInfo, loggerConfig) + : new SdkLogger(sharedState, instrumentationScopeInfo, loggerConfig); } @Override public LogRecordBuilder logRecordBuilder() { - return new SdkLogRecordBuilder(loggerSharedState, instrumentationScopeInfo); + if (loggerEnabled) { + return INCUBATOR_AVAILABLE + ? 
IncubatingUtil.createExtendedLogRecordBuilder( + loggerSharedState, instrumentationScopeInfo) + : new SdkLogRecordBuilder(loggerSharedState, instrumentationScopeInfo); + } + return NOOP_LOGGER.logRecordBuilder(); } // VisibleForTesting diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLoggerProvider.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLoggerProvider.java index 9b9b2e1a97d..ea68a6c2a5c 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLoggerProvider.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLoggerProvider.java @@ -11,7 +11,10 @@ import io.opentelemetry.api.logs.LoggerProvider; import io.opentelemetry.sdk.common.Clock; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.internal.ComponentRegistry; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.logs.internal.LoggerConfig; import io.opentelemetry.sdk.resources.Resource; import java.io.Closeable; import java.util.List; @@ -33,6 +36,7 @@ public final class SdkLoggerProvider implements LoggerProvider, Closeable { private final LoggerSharedState sharedState; private final ComponentRegistry loggerComponentRegistry; + private final ScopeConfigurator loggerConfigurator; private final boolean isNoopLogRecordProcessor; /** @@ -48,16 +52,27 @@ public static SdkLoggerProviderBuilder builder() { Resource resource, Supplier logLimitsSupplier, List processors, - Clock clock) { + Clock clock, + ScopeConfigurator loggerConfigurator) { LogRecordProcessor logRecordProcessor = LogRecordProcessor.composite(processors); this.sharedState = new LoggerSharedState(resource, logLimitsSupplier, logRecordProcessor, clock); this.loggerComponentRegistry = new ComponentRegistry<>( - instrumentationScopeInfo -> new SdkLogger(sharedState, instrumentationScopeInfo)); + instrumentationScopeInfo -> + SdkLogger.create( + sharedState, + instrumentationScopeInfo, + getLoggerConfig(instrumentationScopeInfo))); + this.loggerConfigurator = loggerConfigurator; this.isNoopLogRecordProcessor = logRecordProcessor instanceof NoopLogRecordProcessor; } + private LoggerConfig getLoggerConfig(InstrumentationScopeInfo instrumentationScopeInfo) { + LoggerConfig loggerConfig = loggerConfigurator.apply(instrumentationScopeInfo); + return loggerConfig == null ? 
LoggerConfig.defaultConfig() : loggerConfig; + } + @Override public Logger get(String instrumentationScopeName) { return loggerComponentRegistry.get( diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLoggerProviderBuilder.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLoggerProviderBuilder.java index 8d7004eeabd..94d990c8a4b 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLoggerProviderBuilder.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkLoggerProviderBuilder.java @@ -11,11 +11,17 @@ import io.opentelemetry.api.logs.Logger; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.Clock; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder; import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.internal.LoggerConfig; +import io.opentelemetry.sdk.logs.internal.SdkLoggerProviderUtil; import io.opentelemetry.sdk.resources.Resource; import java.util.ArrayList; import java.util.List; import java.util.Objects; +import java.util.function.Predicate; import java.util.function.Supplier; /** @@ -29,6 +35,8 @@ public final class SdkLoggerProviderBuilder { private Resource resource = Resource.getDefault(); private Supplier logLimitsSupplier = LogLimits::getDefault; private Clock clock = Clock.getDefault(); + private ScopeConfiguratorBuilder loggerConfiguratorBuilder = + LoggerConfig.configuratorBuilder(); SdkLoggerProviderBuilder() {} @@ -100,12 +108,54 @@ public SdkLoggerProviderBuilder setClock(Clock clock) { return this; } + /** + * Set the logger configurator, which computes {@link LoggerConfig} for each {@link + * InstrumentationScopeInfo}. + * + *

This method is experimental and therefore not public. You may call it reflectively via {@link + * SdkLoggerProviderUtil#setLoggerConfigurator(SdkLoggerProviderBuilder, ScopeConfigurator)}. + * + *

Overrides any matchers added via {@link #addLoggerConfiguratorCondition(Predicate, + * LoggerConfig)}. + * + * @see LoggerConfig#configuratorBuilder() + */ + SdkLoggerProviderBuilder setLoggerConfigurator( + ScopeConfigurator loggerConfigurator) { + this.loggerConfiguratorBuilder = loggerConfigurator.toBuilder(); + return this; + } + + /** + * Adds a condition to the logger configurator, which computes {@link LoggerConfig} for each + * {@link InstrumentationScopeInfo}. + * + *

This method is experimental and therefore not public. You may call it reflectively via {@link + * SdkLoggerProviderUtil#addLoggerConfiguratorCondition(SdkLoggerProviderBuilder, Predicate, + * LoggerConfig)}. + * + *
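For illustration, a sketch of wiring a per-scope condition through the internal helper named above (calling it directly rather than reflectively, for brevity; {@code LoggerConfig.disabled()} is assumed here purely for illustration):

  SdkLoggerProviderBuilder builder = SdkLoggerProvider.builder();
  SdkLoggerProviderUtil.addLoggerConfiguratorCondition(
      builder,
      ScopeConfiguratorBuilder.nameMatchesGlob("com.example.noisy.*"),
      LoggerConfig.disabled());
  SdkLoggerProvider loggerProvider = builder.build();
  // Loggers whose scope name matches the glob receive the disabled config; all other scopes
  // fall back to the default LoggerConfig.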

Applies after any previously added conditions. + * + *

If {@link #setLoggerConfigurator(ScopeConfigurator)} was previously called, this condition + * will only be applied if the {@link ScopeConfigurator#apply(Object)} returns null for the + * matched {@link InstrumentationScopeInfo}(s). + * + * @see ScopeConfiguratorBuilder#nameEquals(String) + * @see ScopeConfiguratorBuilder#nameMatchesGlob(String) + */ + SdkLoggerProviderBuilder addLoggerConfiguratorCondition( + Predicate scopeMatcher, LoggerConfig loggerConfig) { + this.loggerConfiguratorBuilder.addCondition(scopeMatcher, loggerConfig); + return this; + } + /** * Create a {@link SdkLoggerProvider} instance. * * @return an instance configured with the provided options */ public SdkLoggerProvider build() { - return new SdkLoggerProvider(resource, logLimitsSupplier, logRecordProcessors, clock); + return new SdkLoggerProvider( + resource, logLimitsSupplier, logRecordProcessors, clock, loggerConfiguratorBuilder.build()); } } diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkReadWriteLogRecord.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkReadWriteLogRecord.java index c237edff511..ad6197d8cf3 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkReadWriteLogRecord.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/SdkReadWriteLogRecord.java @@ -7,12 +7,12 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; import io.opentelemetry.api.internal.GuardedBy; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.internal.AttributesMap; -import io.opentelemetry.sdk.logs.data.Body; import io.opentelemetry.sdk.logs.data.LogRecordData; import io.opentelemetry.sdk.resources.Resource; import javax.annotation.Nullable; @@ -24,12 +24,13 @@ class SdkReadWriteLogRecord implements ReadWriteLogRecord { private final LogLimits logLimits; private final Resource resource; private final InstrumentationScopeInfo instrumentationScopeInfo; + @Nullable private final String eventName; private final long timestampEpochNanos; private final long observedTimestampEpochNanos; private final SpanContext spanContext; private final Severity severity; @Nullable private final String severityText; - private final Body body; + @Nullable private final Value body; private final Object lock = new Object(); @GuardedBy("lock") @@ -40,16 +41,18 @@ private SdkReadWriteLogRecord( LogLimits logLimits, Resource resource, InstrumentationScopeInfo instrumentationScopeInfo, + @Nullable String eventName, long timestampEpochNanos, long observedTimestampEpochNanos, SpanContext spanContext, Severity severity, @Nullable String severityText, - Body body, + @Nullable Value body, @Nullable AttributesMap attributes) { this.logLimits = logLimits; this.resource = resource; this.instrumentationScopeInfo = instrumentationScopeInfo; + this.eventName = eventName; this.timestampEpochNanos = timestampEpochNanos; this.observedTimestampEpochNanos = observedTimestampEpochNanos; this.spanContext = spanContext; @@ -64,17 +67,19 @@ static SdkReadWriteLogRecord create( LogLimits logLimits, Resource resource, InstrumentationScopeInfo instrumentationScopeInfo, + @Nullable String eventName, long timestampEpochNanos, long observedTimestampEpochNanos, SpanContext spanContext, Severity severity, @Nullable String severityText, - Body body, + @Nullable Value body, @Nullable AttributesMap attributes) { return new 
SdkReadWriteLogRecord( logLimits, resource, instrumentationScopeInfo, + eventName, timestampEpochNanos, observedTimestampEpochNanos, spanContext, @@ -115,6 +120,7 @@ public LogRecordData toLogRecordData() { return SdkLogRecordData.create( resource, instrumentationScopeInfo, + eventName, timestampEpochNanos, observedTimestampEpochNanos, spanContext, @@ -125,4 +131,57 @@ public LogRecordData toLogRecordData() { attributes == null ? 0 : attributes.getTotalAddedValues()); } } + + @Override + public InstrumentationScopeInfo getInstrumentationScopeInfo() { + return instrumentationScopeInfo; + } + + @Override + public long getTimestampEpochNanos() { + return timestampEpochNanos; + } + + @Override + public long getObservedTimestampEpochNanos() { + return observedTimestampEpochNanos; + } + + @Override + public SpanContext getSpanContext() { + return spanContext; + } + + @Override + public Severity getSeverity() { + return severity; + } + + @Nullable + @Override + public String getSeverityText() { + return severityText; + } + + @Nullable + @Override + public Value getBodyValue() { + return body; + } + + @Override + public Attributes getAttributes() { + return getImmutableAttributes(); + } + + @Nullable + @Override + public T getAttribute(AttributeKey key) { + synchronized (lock) { + if (attributes == null || attributes.isEmpty()) { + return null; + } + return attributes.get(key); + } + } } diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/Body.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/Body.java index a13ecc003fe..fda485f5eaf 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/Body.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/Body.java @@ -5,6 +5,8 @@ package io.opentelemetry.sdk.logs.data; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.common.ValueType; import javax.annotation.concurrent.Immutable; /** @@ -14,11 +16,18 @@ * log data model. * * @since 1.27.0 + * @deprecated Use {@link LogRecordData#getBodyValue()} and {@link Value}. */ @Immutable +@Deprecated public interface Body { - /** An enum that represents all the possible value types for an {@code Body}. */ + /** + * An enum that represents all the possible value types for an {@code Body}. + * + * @deprecated Use {@link Value#getType()}. + */ + @Deprecated enum Type { EMPTY, STRING @@ -43,9 +52,20 @@ static Body empty() { return EmptyBody.INSTANCE; } - /** Returns the String value of this {@code Body}. */ + /** + * Returns the String value of this {@code Body}. + * + *

If the log record body is some {@link ValueType} other than {@link ValueType#STRING}, this + * returns {@link Value#asString()}. Consumers should use {@link LogRecordData#getBodyValue()} + * instead. + */ String asString(); - /** Returns the type of the {@code Body}. */ + /** + * Returns the type of the {@code Body}. + * + * @deprecated Use {@link Value#getType()}. + */ + @Deprecated Type getType(); } diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/EmptyBody.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/EmptyBody.java index c70e866ee78..921895e6125 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/EmptyBody.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/EmptyBody.java @@ -5,6 +5,7 @@ package io.opentelemetry.sdk.logs.data; +@SuppressWarnings("deprecation") // Implementation of deprecated Body enum EmptyBody implements Body { INSTANCE; diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/LogRecordData.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/LogRecordData.java index eb2f8dcde81..f21b175f52f 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/LogRecordData.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/LogRecordData.java @@ -6,6 +6,8 @@ package io.opentelemetry.sdk.logs.data; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.common.ValueType; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; @@ -46,9 +48,29 @@ public interface LogRecordData { @Nullable String getSeverityText(); - /** Returns the body for this log, or {@link Body#empty()} if unset. */ + /** + * Returns the body for this log, or {@link Body#empty()} if unset. + * + *

If the body has been set to some {@link ValueType} other than {@link ValueType#STRING}, this + * will return a {@link Body} with a string representation of the {@link Value}. + * + * @deprecated Use {@link #getBodyValue()} instead. + */ + @Deprecated Body getBody(); + /** + * Returns the {@link Value} representation of the log body, of null if unset. + * + * @since 1.42.0 + */ + @Nullable + @SuppressWarnings("deprecation") // Default impl uses deprecated code for backwards compatibility + default Value getBodyValue() { + Body body = getBody(); + return body.getType() == Body.Type.EMPTY ? null : Value.of(body.asString()); + } + /** Returns the attributes for this log, or {@link Attributes#empty()} if unset. */ Attributes getAttributes(); diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/StringBody.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/StringBody.java index 4496528f7f0..a08dcab55fa 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/StringBody.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/StringBody.java @@ -10,6 +10,7 @@ @Immutable @AutoValue +@SuppressWarnings("deprecation") // Implementation of deprecated Body abstract class StringBody implements Body { StringBody() {} diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/internal/ExtendedLogRecordData.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/internal/ExtendedLogRecordData.java new file mode 100644 index 00000000000..61e48eb7821 --- /dev/null +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/data/internal/ExtendedLogRecordData.java @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.logs.data.internal; + +import io.opentelemetry.sdk.logs.data.LogRecordData; +import javax.annotation.Nullable; + +/** + * This class is internal and experimental. Its APIs are unstable and can change at any time. Its + * APIs (or a version of them) may be promoted to the public stable API in the future, but no + * guarantees are made. + */ +public interface ExtendedLogRecordData extends LogRecordData { + + @Nullable + String getEventName(); +} diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessor.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessor.java index 26ac5005574..601bfdfa208 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessor.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessor.java @@ -45,7 +45,7 @@ public final class BatchLogRecordProcessor implements LogRecordProcessor { private static final String WORKER_THREAD_NAME = BatchLogRecordProcessor.class.getSimpleName() + "_WorkerThread"; private static final AttributeKey LOG_RECORD_PROCESSOR_TYPE_LABEL = - AttributeKey.stringKey("logRecordProcessorType"); + AttributeKey.stringKey("processorType"); private static final AttributeKey LOG_RECORD_PROCESSOR_DROPPED_LABEL = AttributeKey.booleanKey("dropped"); private static final String LOG_RECORD_PROCESSOR_TYPE_VALUE = @@ -105,6 +105,15 @@ public CompletableResultCode forceFlush() { return worker.forceFlush(); } + /** + * Return the processor's configured {@link LogRecordExporter}. 
+ * + * @since 1.37.0 + */ + public LogRecordExporter getLogRecordExporter() { + return worker.logRecordExporter; + } + // Visible for testing List getBatch() { return worker.batch; @@ -172,7 +181,7 @@ private Worker( meter .gaugeBuilder("queueSize") .ofLongs() - .setDescription("The number of logs queued") + .setDescription("The number of items queued") .setUnit("1") .buildWithCallback( result -> diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessorBuilder.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessorBuilder.java index 6aafa9525aa..5e17848c774 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessorBuilder.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessorBuilder.java @@ -11,6 +11,8 @@ import io.opentelemetry.api.metrics.MeterProvider; import java.time.Duration; import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; /** * Builder class for {@link BatchLogRecordProcessor}. @@ -18,6 +20,8 @@ * @since 1.27.0 */ public final class BatchLogRecordProcessorBuilder { + private static final Logger logger = + Logger.getLogger(BatchLogRecordProcessorBuilder.class.getName()); // Visible for testing static final long DEFAULT_SCHEDULE_DELAY_MILLIS = 1000; @@ -71,7 +75,7 @@ long getScheduleDelayNanos() { public BatchLogRecordProcessorBuilder setExporterTimeout(long timeout, TimeUnit unit) { requireNonNull(unit, "unit"); checkArgument(timeout >= 0, "timeout must be non-negative"); - exporterTimeoutNanos = unit.toNanos(timeout); + exporterTimeoutNanos = timeout == 0 ? Long.MAX_VALUE : unit.toNanos(timeout); return this; } @@ -98,9 +102,11 @@ long getExporterTimeoutNanos() { * @param maxQueueSize the maximum number of Logs that are kept in the queue before start * dropping. * @return this. + * @throws IllegalArgumentException if {@code maxQueueSize} is not positive. * @see BatchLogRecordProcessorBuilder#DEFAULT_MAX_QUEUE_SIZE */ public BatchLogRecordProcessorBuilder setMaxQueueSize(int maxQueueSize) { + checkArgument(maxQueueSize > 0, "maxQueueSize must be positive."); this.maxQueueSize = maxQueueSize; return this; } @@ -148,6 +154,13 @@ int getMaxExportBatchSize() { * @return a new {@link BatchLogRecordProcessor}. */ public BatchLogRecordProcessor build() { + if (maxExportBatchSize > maxQueueSize) { + logger.log( + Level.WARNING, + "maxExportBatchSize should not exceed maxQueueSize. Setting maxExportBatchSize to {0} instead of {1}", + new Object[] {maxQueueSize, maxExportBatchSize}); + maxExportBatchSize = maxQueueSize; + } return new BatchLogRecordProcessor( logRecordExporter, meterProvider, diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/SimpleLogRecordProcessor.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/SimpleLogRecordProcessor.java index 9b5c4791ef4..cc75b50ae1f 100644 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/SimpleLogRecordProcessor.java +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/export/SimpleLogRecordProcessor.java @@ -41,6 +41,8 @@ public final class SimpleLogRecordProcessor implements LogRecordProcessor { Collections.newSetFromMap(new ConcurrentHashMap<>()); private final AtomicBoolean isShutdown = new AtomicBoolean(false); + private final Object exporterLock = new Object(); + /** * Returns a new {@link SimpleLogRecordProcessor} which exports logs to the {@link * LogRecordExporter} synchronously. 
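Taken together, the builder changes above (maxQueueSize must now be positive, an exporter timeout of zero now means "no timeout", and maxExportBatchSize is clamped to maxQueueSize at build time) plus the new getLogRecordExporter() accessor can be exercised with a minimal sketch like the one below; the InMemoryLogRecordExporter from sdk-testing is used purely as a stand-in exporter and the numbers are illustrative.

    import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor;
    import io.opentelemetry.sdk.logs.export.LogRecordExporter;
    import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter;
    import java.util.concurrent.TimeUnit;

    class BatchProcessorBuilderSketch {
      static BatchLogRecordProcessor build() {
        LogRecordExporter exporter = InMemoryLogRecordExporter.create();
        BatchLogRecordProcessor processor =
            BatchLogRecordProcessor.builder(exporter)
                .setMaxQueueSize(2048) // must be positive, otherwise IllegalArgumentException
                .setMaxExportBatchSize(4096) // exceeds maxQueueSize, so build() warns and clamps to 2048
                .setExporterTimeout(0, TimeUnit.SECONDS) // zero is now treated as "no timeout"
                .build();
        // New accessor added by this change; returns the exporter passed to builder().
        if (processor.getLogRecordExporter() != exporter) {
          throw new AssertionError("unexpected exporter");
        }
        return processor;
      }
    }
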
@@ -64,7 +66,12 @@ private SimpleLogRecordProcessor(LogRecordExporter logRecordExporter) { public void onEmit(Context context, ReadWriteLogRecord logRecord) { try { List logs = Collections.singletonList(logRecord.toLogRecordData()); - CompletableResultCode result = logRecordExporter.export(logs); + CompletableResultCode result; + + synchronized (exporterLock) { + result = logRecordExporter.export(logs); + } + pendingExports.add(result); result.whenComplete( () -> { @@ -107,6 +114,15 @@ public CompletableResultCode forceFlush() { return CompletableResultCode.ofAll(pendingExports); } + /** + * Return the processor's configured {@link LogRecordExporter}. + * + * @since 1.37.0 + */ + public LogRecordExporter getLogRecordExporter() { + return logRecordExporter; + } + @Override public String toString() { return "SimpleLogRecordProcessor{" + "logRecordExporter=" + logRecordExporter + '}'; diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/internal/LoggerConfig.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/internal/LoggerConfig.java new file mode 100644 index 00000000000..00ffdc86b41 --- /dev/null +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/internal/LoggerConfig.java @@ -0,0 +1,67 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.logs.internal; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder; +import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder; +import java.util.function.Predicate; +import javax.annotation.concurrent.Immutable; + +/** + * A collection of configuration options which define the behavior of a {@link Logger}. + * + *

This class is internal and experimental. Its APIs are unstable and can change at any time. Its + * APIs (or a version of them) may be promoted to the public stable API in the future, but no + * guarantees are made. + * + * @see SdkLoggerProviderUtil#setLoggerConfigurator(SdkLoggerProviderBuilder, ScopeConfigurator) + * @see SdkLoggerProviderUtil#addLoggerConfiguratorCondition(SdkLoggerProviderBuilder, Predicate, + * LoggerConfig) + */ +@AutoValue +@Immutable +public abstract class LoggerConfig { + + private static final LoggerConfig DEFAULT_CONFIG = + new AutoValue_LoggerConfig(/* enabled= */ true); + private static final LoggerConfig DISABLED_CONFIG = + new AutoValue_LoggerConfig(/* enabled= */ false); + + /** Returns a disabled {@link LoggerConfig}. */ + public static LoggerConfig disabled() { + return DISABLED_CONFIG; + } + + /** Returns an enabled {@link LoggerConfig}. */ + public static LoggerConfig enabled() { + return DEFAULT_CONFIG; + } + + /** + * Returns the default {@link LoggerConfig}, which is used when no configurator is set or when the + * logger configurator returns {@code null} for a {@link InstrumentationScopeInfo}. + */ + public static LoggerConfig defaultConfig() { + return DEFAULT_CONFIG; + } + + /** + * Create a {@link ScopeConfiguratorBuilder} for configuring {@link + * SdkLoggerProviderUtil#setLoggerConfigurator(SdkLoggerProviderBuilder, ScopeConfigurator)}. + */ + public static ScopeConfiguratorBuilder configuratorBuilder() { + return ScopeConfigurator.builder(); + } + + LoggerConfig() {} + + /** Returns {@code true} if this logger is enabled. Defaults to {@code true}. */ + public abstract boolean isEnabled(); +} diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/internal/SdkEventEmitterProvider.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/internal/SdkEventEmitterProvider.java deleted file mode 100644 index 18d1e434824..00000000000 --- a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/internal/SdkEventEmitterProvider.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.logs.internal; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.events.EventEmitter; -import io.opentelemetry.api.events.EventEmitterBuilder; -import io.opentelemetry.api.events.EventEmitterProvider; -import io.opentelemetry.api.logs.Logger; -import io.opentelemetry.api.logs.LoggerBuilder; -import io.opentelemetry.api.logs.LoggerProvider; -import io.opentelemetry.sdk.common.Clock; -import java.util.concurrent.TimeUnit; - -/** - * SDK implementation for {@link EventEmitterProvider}. - * - *

Delegates all calls to the configured {@link LoggerProvider}, and its {@link LoggerBuilder}s, - * {@link Logger}s. - */ -public final class SdkEventEmitterProvider implements EventEmitterProvider { - - private static final String DEFAULT_EVENT_DOMAIN = "unknown"; - - private final LoggerProvider delegateLoggerProvider; - private final Clock clock; - - private SdkEventEmitterProvider(LoggerProvider delegateLoggerProvider, Clock clock) { - this.delegateLoggerProvider = delegateLoggerProvider; - this.clock = clock; - } - - /** - * Create a {@link SdkEventEmitterProvider} which delegates to the {@code delegateLoggerProvider}. - */ - public static SdkEventEmitterProvider create(LoggerProvider delegateLoggerProvider) { - return new SdkEventEmitterProvider(delegateLoggerProvider, Clock.getDefault()); - } - - /** - * Create a {@link SdkEventEmitterProvider} which delegates to the {@code delegateLoggerProvider}. - */ - public static SdkEventEmitterProvider create(LoggerProvider delegateLoggerProvider, Clock clock) { - return new SdkEventEmitterProvider(delegateLoggerProvider, clock); - } - - @Override - public EventEmitter get(String instrumentationScopeName) { - return eventEmitterBuilder(instrumentationScopeName) - .setEventDomain(DEFAULT_EVENT_DOMAIN) - .build(); - } - - @Override - public EventEmitterBuilder eventEmitterBuilder(String instrumentationScopeName) { - return new SdkEventEmitterBuilder( - clock, delegateLoggerProvider.loggerBuilder(instrumentationScopeName)); - } - - private static class SdkEventEmitterBuilder implements EventEmitterBuilder { - - private final Clock clock; - private final LoggerBuilder delegateLoggerBuilder; - private String eventDomain = DEFAULT_EVENT_DOMAIN; - - private SdkEventEmitterBuilder(Clock clock, LoggerBuilder delegateLoggerBuilder) { - this.clock = clock; - this.delegateLoggerBuilder = delegateLoggerBuilder; - } - - @Override - public EventEmitterBuilder setEventDomain(String eventDomain) { - this.eventDomain = eventDomain; - return this; - } - - @Override - public EventEmitterBuilder setSchemaUrl(String schemaUrl) { - delegateLoggerBuilder.setSchemaUrl(schemaUrl); - return this; - } - - @Override - public EventEmitterBuilder setInstrumentationVersion(String instrumentationScopeVersion) { - delegateLoggerBuilder.setInstrumentationVersion(instrumentationScopeVersion); - return this; - } - - @Override - public EventEmitter build() { - return new SdkEventEmitter(clock, delegateLoggerBuilder.build(), eventDomain); - } - } - - private static class SdkEventEmitter implements EventEmitter { - - private static final AttributeKey EVENT_DOMAIN = AttributeKey.stringKey("event.domain"); - private static final AttributeKey EVENT_NAME = AttributeKey.stringKey("event.name"); - - private final Clock clock; - private final Logger delegateLogger; - private final String eventDomain; - - private SdkEventEmitter(Clock clock, Logger delegateLogger, String eventDomain) { - this.clock = clock; - this.delegateLogger = delegateLogger; - this.eventDomain = eventDomain; - } - - @Override - public void emit(String eventName, Attributes attributes) { - delegateLogger - .logRecordBuilder() - .setTimestamp(clock.now(), TimeUnit.NANOSECONDS) - .setAllAttributes(attributes) - .setAttribute(EVENT_DOMAIN, eventDomain) - .setAttribute(EVENT_NAME, eventName) - .emit(); - } - } -} diff --git a/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/internal/SdkLoggerProviderUtil.java b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/internal/SdkLoggerProviderUtil.java new file mode 100644 index 
00000000000..eb4fbb4ec29 --- /dev/null +++ b/sdk/logs/src/main/java/io/opentelemetry/sdk/logs/internal/SdkLoggerProviderUtil.java @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.logs.internal; + +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.function.Predicate; + +/** + * A collection of methods that allow use of experimental features prior to availability in public + * APIs. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public final class SdkLoggerProviderUtil { + + private SdkLoggerProviderUtil() {} + + /** Reflectively set the {@link ScopeConfigurator} to the {@link SdkLoggerProviderBuilder}. */ + public static void setLoggerConfigurator( + SdkLoggerProviderBuilder sdkLoggerProviderBuilder, + ScopeConfigurator loggerConfigurator) { + try { + Method method = + SdkLoggerProviderBuilder.class.getDeclaredMethod( + "setLoggerConfigurator", ScopeConfigurator.class); + method.setAccessible(true); + method.invoke(sdkLoggerProviderBuilder, loggerConfigurator); + } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + throw new IllegalStateException( + "Error calling setLoggerConfigurator on SdkLoggerProviderBuilder", e); + } + } + + /** Reflectively add a logger configurator condition to the {@link SdkLoggerProviderBuilder}. */ + public static void addLoggerConfiguratorCondition( + SdkLoggerProviderBuilder sdkLoggerProviderBuilder, + Predicate scopeMatcher, + LoggerConfig loggerConfig) { + try { + Method method = + SdkLoggerProviderBuilder.class.getDeclaredMethod( + "addLoggerConfiguratorCondition", Predicate.class, LoggerConfig.class); + method.setAccessible(true); + method.invoke(sdkLoggerProviderBuilder, scopeMatcher, loggerConfig); + } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + throw new IllegalStateException( + "Error calling addLoggerConfiguratorCondition on SdkLoggerProviderBuilder", e); + } + } +} diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/LogLimitsTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/LogLimitsTest.java index 250d49f8402..4acb0666bab 100644 --- a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/LogLimitsTest.java +++ b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/LogLimitsTest.java @@ -6,6 +6,7 @@ package io.opentelemetry.sdk.logs; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; import org.junit.jupiter.api.Test; @@ -33,11 +34,17 @@ void updateLogLimits_All() { @Test void invalidLogLimits() { - assertThatThrownBy(() -> LogLimits.builder().setMaxNumberOfAttributes(0)) - .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> LogLimits.builder().setMaxNumberOfAttributes(-1)) .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> LogLimits.builder().setMaxAttributeValueLength(-1)) .isInstanceOf(IllegalArgumentException.class); } + + @Test + void validLogLimits() { + assertThatCode(() -> LogLimits.builder().setMaxNumberOfAttributes(0)) + .doesNotThrowAnyException(); + assertThatCode(() -> LogLimits.builder().setMaxAttributeValueLength(0)) + .doesNotThrowAnyException(); + } } diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/ReadWriteLogRecordTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/ReadWriteLogRecordTest.java new file mode 100644 index 00000000000..95215ee57db --- /dev/null +++ b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/ReadWriteLogRecordTest.java @@ -0,0 +1,74 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.logs; + +import static io.opentelemetry.api.common.AttributeKey.stringKey; +import static org.assertj.core.api.Assertions.assertThat; + +import 
io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.AttributesMap; +import io.opentelemetry.sdk.resources.Resource; +import org.junit.jupiter.api.Test; + +class ReadWriteLogRecordTest { + + @Test + void addAllAttributes() { + Attributes newAttributes = Attributes.of(stringKey("foo"), "bar", stringKey("bar"), "buzz"); + SdkReadWriteLogRecord logRecord = buildLogRecord(); + + logRecord.setAllAttributes(newAttributes); + + Attributes result = logRecord.getAttributes(); + assertThat(result.get(stringKey("foo"))).isEqualTo("bar"); + assertThat(result.get(stringKey("bar"))).isEqualTo("buzz"); + assertThat(result.get(stringKey("untouched"))).isEqualTo("yes"); + } + + @Test + void addAllHandlesNull() { + SdkReadWriteLogRecord logRecord = buildLogRecord(); + Attributes originalAttributes = logRecord.getAttributes(); + ReadWriteLogRecord result = logRecord.setAllAttributes(null); + assertThat(result.getAttributes()).isEqualTo(originalAttributes); + } + + @Test + void allHandlesEmpty() { + SdkReadWriteLogRecord logRecord = buildLogRecord(); + Attributes originalAttributes = logRecord.getAttributes(); + ReadWriteLogRecord result = logRecord.setAllAttributes(Attributes.empty()); + assertThat(result.getAttributes()).isEqualTo(originalAttributes); + } + + SdkReadWriteLogRecord buildLogRecord() { + Value body = Value.of("bod"); + AttributesMap initialAttributes = AttributesMap.create(100, 200); + initialAttributes.put(stringKey("foo"), "aaiosjfjioasdiojfjioasojifja"); + initialAttributes.put(stringKey("untouched"), "yes"); + LogLimits limits = LogLimits.getDefault(); + Resource resource = Resource.empty(); + InstrumentationScopeInfo scope = InstrumentationScopeInfo.create("test"); + SpanContext spanContext = SpanContext.getInvalid(); + + return SdkReadWriteLogRecord.create( + limits, + resource, + scope, + "event name", + 0L, + 0L, + spanContext, + Severity.DEBUG, + "buggin", + body, + initialAttributes); + } +} diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLogRecordBuilderTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLogRecordBuilderTest.java index ff191032265..202f73917f1 100644 --- a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLogRecordBuilderTest.java +++ b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLogRecordBuilderTest.java @@ -5,11 +5,17 @@ package io.opentelemetry.sdk.logs; +import static io.opentelemetry.api.common.AttributeKey.booleanKey; +import static io.opentelemetry.api.common.AttributeKey.doubleKey; +import static io.opentelemetry.api.common.AttributeKey.longKey; +import static io.opentelemetry.api.common.AttributeKey.stringKey; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; import static org.mockito.Mockito.when; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanContext; @@ -18,7 +24,6 @@ import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.Clock; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.logs.data.Body; import 
io.opentelemetry.sdk.resources.Resource; import java.time.Instant; import java.util.concurrent.TimeUnit; @@ -60,6 +65,7 @@ void emit_AllFields() { Instant timestamp = Instant.now(); Instant observedTimestamp = Instant.now().plusNanos(100); + String eventName = "event name"; String bodyStr = "body"; String sevText = "sevText"; Severity severity = Severity.DEBUG3; @@ -70,12 +76,13 @@ void emit_AllFields() { TraceFlags.getSampled(), TraceState.getDefault()); + builder.setEventName(eventName); builder.setBody(bodyStr); builder.setTimestamp(123, TimeUnit.SECONDS); builder.setTimestamp(timestamp); builder.setObservedTimestamp(456, TimeUnit.SECONDS); builder.setObservedTimestamp(observedTimestamp); - builder.setAttribute(null, null); + builder.setAttribute((String) null, (String) null); builder.setAttribute(AttributeKey.stringKey("k1"), "v1"); builder.setAllAttributes(Attributes.builder().put("k2", "v2").put("k3", "v3").build()); builder.setContext(Span.wrap(spanContext).storeInContext(Context.root())); @@ -85,6 +92,8 @@ void emit_AllFields() { assertThat(emittedLog.get().toLogRecordData()) .hasResource(RESOURCE) .hasInstrumentationScope(SCOPE_INFO) + // TODO (trask) once event name stabilizes + // .hasEventName(eventName) .hasBody(bodyStr) .hasTimestamp(TimeUnit.SECONDS.toNanos(timestamp.getEpochSecond()) + timestamp.getNano()) .hasObservedTimestamp( @@ -105,11 +114,29 @@ void emit_NoFields() { assertThat(emittedLog.get().toLogRecordData()) .hasResource(RESOURCE) .hasInstrumentationScope(SCOPE_INFO) - .hasBody(Body.empty().asString()) + .hasBody((Value) null) .hasTimestamp(0L) .hasObservedTimestamp(10L) .hasAttributes(Attributes.empty()) .hasSpanContext(SpanContext.getInvalid()) .hasSeverity(Severity.UNDEFINED_SEVERITY_NUMBER); } + + @Test + void testConvenienceAttributeMethods() { + builder + .setAttribute("foo", "bar") + .setAttribute("lk", 12L) + .setAttribute("dk", 12.123) + .setAttribute("bk", true) + .setAttribute("ik", 13) + .emit(); + assertThat(emittedLog.get().toLogRecordData()) + .hasAttributesSatisfyingExactly( + equalTo(stringKey("foo"), "bar"), + equalTo(longKey("lk"), 12L), + equalTo(doubleKey("dk"), 12.123), + equalTo(booleanKey("bk"), true), + equalTo(longKey("ik"), 13L)); + } } diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLoggerProviderTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLoggerProviderTest.java index 05da79ee3eb..63063731ac4 100644 --- a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLoggerProviderTest.java +++ b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLoggerProviderTest.java @@ -246,6 +246,8 @@ void loggerBuilder_WithLogRecordProcessor() { sdkLoggerProvider .get("test") .logRecordBuilder() + // TODO (trask) once event name stabilizes + // .setEventName("event name") .setTimestamp(100, TimeUnit.NANOSECONDS) .setContext(Span.wrap(spanContext).storeInContext(Context.root())) .setSeverity(Severity.DEBUG) @@ -258,6 +260,8 @@ void loggerBuilder_WithLogRecordProcessor() { assertThat(logRecordData.get()) .hasResource(resource) .hasInstrumentationScope(InstrumentationScopeInfo.create("test")) + // TODO (trask) once event name stabilizes + // .hasEventName("event name") .hasTimestamp(100) .hasSpanContext(spanContext) .hasSeverity(Severity.DEBUG) diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLoggerTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLoggerTest.java index f8246dfc0a9..2ea1ee19291 100644 --- a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLoggerTest.java +++ 
b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/SdkLoggerTest.java @@ -24,6 +24,7 @@ import io.opentelemetry.sdk.common.Clock; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.internal.LoggerConfig; import io.opentelemetry.sdk.resources.Resource; import java.util.Arrays; import java.util.concurrent.TimeUnit; @@ -44,7 +45,7 @@ void logRecordBuilder() { when(state.getLogRecordProcessor()).thenReturn(logRecordProcessor); when(state.getClock()).thenReturn(clock); - SdkLogger logger = new SdkLogger(state, info); + SdkLogger logger = new SdkLogger(state, info, LoggerConfig.defaultConfig()); LogRecordBuilder logRecordBuilder = logger.logRecordBuilder(); logRecordBuilder.setBody("foo"); diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/ValueBodyTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/ValueBodyTest.java new file mode 100644 index 00000000000..954ba6bdb63 --- /dev/null +++ b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/ValueBodyTest.java @@ -0,0 +1,169 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.logs; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; + +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.common.ValueType; +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.LinkedHashMap; +import org.junit.jupiter.api.Test; + +class ValueBodyTest { + + @Test + @SuppressWarnings("DoubleBraceInitialization") + void valueBody() { + InMemoryLogRecordExporter exporter = InMemoryLogRecordExporter.create(); + SdkLoggerProvider provider = + SdkLoggerProvider.builder() + .addLogRecordProcessor(SimpleLogRecordProcessor.create(exporter)) + .build(); + Logger logger = provider.get(ValueBodyTest.class.getName()); + + // Value can be a primitive type, like a string, long, double, boolean + logger.logRecordBuilder().setBody(Value.of(1)).emit(); + assertThat(exporter.getFinishedLogRecordItems()) + .hasSize(1) + .satisfiesExactly( + logRecordData -> { + assertThat(logRecordData.getBodyValue()) + .isNotNull() + .satisfies( + body -> { + assertThat(body.getType()).isEqualTo(ValueType.LONG); + assertThat((Long) body.getValue()).isEqualTo(1L); + }); + }); + exporter.reset(); + + // ...or a byte array of raw data + logger + .logRecordBuilder() + .setBody(Value.of("hello world".getBytes(StandardCharsets.UTF_8))) + .emit(); + assertThat(exporter.getFinishedLogRecordItems()) + .hasSize(1) + .satisfiesExactly( + logRecordData -> { + assertThat(logRecordData.getBodyValue()) + .isNotNull() + .satisfies( + body -> { + assertThat(body.getType()).isEqualTo(ValueType.BYTES); + assertThat((ByteBuffer) body.getValue()) + .isEqualTo( + ByteBuffer.wrap("hello world".getBytes(StandardCharsets.UTF_8))); + }); + }); + exporter.reset(); + + // But most commonly it will be used to represent complex structured data like a map + logger + .logRecordBuilder() + .setBody( + // The protocol data structure uses a repeated KeyValue to represent a map: + // https://github.com/open-telemetry/opentelemetry-proto/blob/ac3242b03157295e4ee9e616af53b81517b06559/opentelemetry/proto/common/v1/common.proto#L59 + // The
comment says that keys aren't allowed to repeat themselves, and because it's + // represented as a repeated KeyValue, we need to at least offer the ability to preserve + // order. + // Accepting a Map> makes for a cleaner API, but ordering of the + // entries is lost. To accommodate use cases where ordering should be preserved, we + // accept an array of key value pairs, but also a map-based alternative (see the + // key_value_list_key entry). + Value.of( + KeyValue.of("str_key", Value.of("value")), + KeyValue.of("bool_key", Value.of(true)), + KeyValue.of("long_key", Value.of(1L)), + KeyValue.of("double_key", Value.of(1.1)), + KeyValue.of("bytes_key", Value.of("bytes".getBytes(StandardCharsets.UTF_8))), + KeyValue.of("arr_key", Value.of(Value.of("entry1"), Value.of(2), Value.of(3.3))), + KeyValue.of( + "key_value_list_key", + Value.of( + new LinkedHashMap>() { + { + put("child_str_key1", Value.of("child_value1")); + put("child_str_key2", Value.of("child_value2")); + } + })))) + .emit(); + assertThat(exporter.getFinishedLogRecordItems()) + .hasSize(1) + .satisfiesExactly( + logRecordData -> { + assertThat(logRecordData.getBodyValue()) + .isNotNull() + // TODO: use fluent asserts when available. See + // https://github.com/open-telemetry/opentelemetry-java/pull/6509 + .satisfies( + body -> { + assertThat(body.getType()).isEqualTo(ValueType.KEY_VALUE_LIST); + assertThat(body) + .isEqualTo( + Value.of( + KeyValue.of("str_key", Value.of("value")), + KeyValue.of("bool_key", Value.of(true)), + KeyValue.of("long_key", Value.of(1L)), + KeyValue.of("double_key", Value.of(1.1)), + KeyValue.of( + "bytes_key", + Value.of("bytes".getBytes(StandardCharsets.UTF_8))), + KeyValue.of( + "arr_key", + Value.of(Value.of("entry1"), Value.of(2), Value.of(3.3))), + KeyValue.of( + "key_value_list_key", + Value.of( + new LinkedHashMap>() { + { + put("child_str_key1", Value.of("child_value1")); + put("child_str_key2", Value.of("child_value2")); + } + })))); + assertThat(body.asString()) + .isEqualTo( + "[" + + "str_key=value, " + + "bool_key=true, " + + "long_key=1, " + + "double_key=1.1, " + + "bytes_key=Ynl0ZXM=, " + + "arr_key=[entry1, 2, 3.3], " + + "key_value_list_key=[child_str_key1=child_value1, child_str_key2=child_value2]" + + "]"); + }); + }); + exporter.reset(); + + // ...or an array (optionally with heterogeneous types) + logger + .logRecordBuilder() + .setBody(Value.of(Value.of("entry1"), Value.of("entry2"), Value.of(3))) + .emit(); + assertThat(exporter.getFinishedLogRecordItems()) + .hasSize(1) + .satisfiesExactly( + logRecordData -> { + assertThat(logRecordData.getBodyValue()) + .isNotNull() + .satisfies( + body -> { + assertThat(body.getType()).isEqualTo(ValueType.ARRAY); + assertThat(body) + .isEqualTo( + Value.of(Value.of("entry1"), Value.of("entry2"), Value.of(3))); + }); + }); + exporter.reset(); + } +} diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/data/BodyTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/data/BodyTest.java index d57fb085fb0..57143d4f49f 100644 --- a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/data/BodyTest.java +++ b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/data/BodyTest.java @@ -9,6 +9,7 @@ import org.junit.jupiter.api.Test; +@SuppressWarnings("deprecation") // Testing deprecated code class BodyTest { @Test diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessorTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessorTest.java index 9dcf07261bb..4f388a5ca49 100644 ---
a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessorTest.java +++ b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/export/BatchLogRecordProcessorTest.java @@ -6,13 +6,17 @@ package io.opentelemetry.sdk.logs.export; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.AssertionsForClassTypes.assertThatCode; import static org.awaitility.Awaitility.await; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyList; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import io.opentelemetry.api.internal.GuardedBy; @@ -20,6 +24,7 @@ import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.logs.data.LogRecordData; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -110,6 +115,48 @@ void builderInvalidConfig() { () -> BatchLogRecordProcessor.builder(mockLogRecordExporter).setExporterTimeout(null)) .isInstanceOf(NullPointerException.class) .hasMessage("timeout"); + assertThatThrownBy( + () -> BatchLogRecordProcessor.builder(mockLogRecordExporter).setMaxQueueSize(0)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("maxQueueSize must be positive."); + } + + @Test + void builderAdjustMaxBatchSize() { + LogRecordExporter dummyExporter = new CompletableLogRecordExporter(); + + BatchLogRecordProcessorBuilder builder = + BatchLogRecordProcessor.builder(dummyExporter) + .setMaxQueueSize(513) + .setMaxExportBatchSize(1000); + builder.build(); + + assertThat(builder.getMaxExportBatchSize()).isEqualTo(513); + assertThat(builder.getMaxQueueSize()).isEqualTo(513); + } + + @Test + void maxExportBatchSizeExceedsQueueSize() throws InterruptedException { + // Given a processor configured with a maxExportBatchSize > maxQueueSize, ensure that after n = + // maxQueueSize logs are emitted, export is triggered and that the queue is fully drained and + // exported. 
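The emitLog(...) helper used by the test below is defined elsewhere in BatchLogRecordProcessorTest and is not part of this hunk; a hypothetical reconstruction (the logger name is illustrative) would look roughly like:

    // Hypothetical reconstruction of the helper the test calls; the real method
    // lives elsewhere in BatchLogRecordProcessorTest and may differ in detail.
    private static void emitLog(SdkLoggerProvider sdkLoggerProvider, String message) {
      sdkLoggerProvider.get("test").logRecordBuilder().setBody(message).emit();
    }
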
+ int maxQueueSize = 2048; + when(mockLogRecordExporter.export(any())).thenReturn(CompletableResultCode.ofSuccess()); + SdkLoggerProvider sdkLoggerProvider = + SdkLoggerProvider.builder() + .addLogRecordProcessor( + BatchLogRecordProcessor.builder(mockLogRecordExporter) + .setScheduleDelay(Duration.ofSeconds(Integer.MAX_VALUE)) + .setMaxExportBatchSize(2049) + .setMaxQueueSize(maxQueueSize) + .build()) + .build(); + + for (int i = 0; i < maxQueueSize; i++) { + emitLog(sdkLoggerProvider, "log " + i); + } + + await().untilAsserted(() -> verify(mockLogRecordExporter, times(1)).export(any())); } @Test @@ -336,6 +383,7 @@ public void continuesIfExporterTimesOut() throws InterruptedException { .setExporterTimeout(exporterTimeoutMillis, TimeUnit.MILLISECONDS) .setScheduleDelay(1, TimeUnit.MILLISECONDS) .setMaxQueueSize(1) + .setMaxExportBatchSize(1) .build(); SdkLoggerProvider sdkLoggerProvider = SdkLoggerProvider.builder().addLogRecordProcessor(blp).build(); @@ -417,6 +465,13 @@ void shutdownPropagatesFailure() { assertThat(result.isSuccess()).isFalse(); } + @Test + void getLogRecordExporter() { + assertThat( + BatchLogRecordProcessor.builder(mockLogRecordExporter).build().getLogRecordExporter()) + .isSameAs(mockLogRecordExporter); + } + @Test void toString_Valid() { when(mockLogRecordExporter.toString()).thenReturn("MockLogRecordExporter"); diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/export/SimpleLogRecordProcessorTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/export/SimpleLogRecordProcessorTest.java index bace5bdbaf6..e50f6624920 100644 --- a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/export/SimpleLogRecordProcessorTest.java +++ b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/export/SimpleLogRecordProcessorTest.java @@ -120,6 +120,14 @@ void shutdown() { verify(logRecordExporter).shutdown(); } + @Test + void getLogRecordExporter() { + assertThat( + ((SimpleLogRecordProcessor) SimpleLogRecordProcessor.create(logRecordExporter)) + .getLogRecordExporter()) + .isSameAs(logRecordExporter); + } + @Test void toString_Valid() { when(logRecordExporter.toString()).thenReturn("MockLogRecordExporter"); diff --git a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/internal/SdkEventEmitterProviderTest.java b/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/internal/SdkEventEmitterProviderTest.java deleted file mode 100644 index 964c67e64c1..00000000000 --- a/sdk/logs/src/test/java/io/opentelemetry/sdk/logs/internal/SdkEventEmitterProviderTest.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.logs.internal; - -import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.sdk.common.Clock; -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.logs.ReadWriteLogRecord; -import io.opentelemetry.sdk.logs.SdkLoggerProvider; -import io.opentelemetry.sdk.resources.Resource; -import java.util.concurrent.atomic.AtomicReference; -import org.junit.jupiter.api.Test; - -class SdkEventEmitterProviderTest { - - private static final Resource RESOURCE = - Resource.builder().put("resource-key", "resource-value").build(); - - private final Clock clock = mock(Clock.class); - private final AtomicReference seenLog = new AtomicReference<>(); - private final 
SdkEventEmitterProvider eventEmitterProvider = - SdkEventEmitterProvider.create( - SdkLoggerProvider.builder() - .setResource(RESOURCE) - .addLogRecordProcessor((context, logRecord) -> seenLog.set(logRecord)) - .build(), - clock); - - @Test - void emit_WithDomain() { - when(clock.now()).thenReturn(10L); - - eventEmitterProvider - .eventEmitterBuilder("test-scope") - .setEventDomain("event-domain") - .build() - .emit( - "event-name", - Attributes.builder() - .put("key1", "value1") - // should be overridden by the eventName argument passed to emit - .put("event.name", "foo") - // should be overridden by the eventDomain - .put("event.domain", "foo") - .build()); - - assertThat(seenLog.get().toLogRecordData()) - .hasResource(RESOURCE) - .hasInstrumentationScope(InstrumentationScopeInfo.create("test-scope")) - .hasTimestamp(10L) - .hasAttributes( - Attributes.builder() - .put("key1", "value1") - .put("event.domain", "event-domain") - .put("event.name", "event-name") - .build()); - } - - @Test - void emit_NoDomain() { - when(clock.now()).thenReturn(10L); - - eventEmitterProvider - .eventEmitterBuilder("test-scope") - .build() - .emit( - "event-name", - Attributes.builder() - .put("key1", "value1") - // should be overridden by the eventName argument passed to emit - .put("event.name", "foo") - // should be overridden by the default eventDomain - .put("event.domain", "foo") - .build()); - - assertThat(seenLog.get().toLogRecordData()) - .hasResource(RESOURCE) - .hasInstrumentationScope(InstrumentationScopeInfo.create("test-scope")) - .hasTimestamp(10L) - .hasAttributes( - Attributes.builder() - .put("key1", "value1") - .put("event.domain", "unknown") - .put("event.name", "event-name") - .build()); - } -} diff --git a/sdk/logs/src/testIncubating/java/io/opentelemetry/sdk/logs/LoggerConfigTest.java b/sdk/logs/src/testIncubating/java/io/opentelemetry/sdk/logs/LoggerConfigTest.java new file mode 100644 index 00000000000..f94b97388b4 --- /dev/null +++ b/sdk/logs/src/testIncubating/java/io/opentelemetry/sdk/logs/LoggerConfigTest.java @@ -0,0 +1,132 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.logs; + +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameEquals; +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameMatchesGlob; +import static io.opentelemetry.sdk.logs.internal.LoggerConfig.defaultConfig; +import static io.opentelemetry.sdk.logs.internal.LoggerConfig.enabled; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; + +import io.opentelemetry.api.incubator.logs.ExtendedLogger; +import io.opentelemetry.api.logs.Logger; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; +import io.opentelemetry.sdk.logs.internal.LoggerConfig; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class LoggerConfigTest { + + @Test + void disableScopes() { + InMemoryLogRecordExporter exporter = InMemoryLogRecordExporter.create(); + SdkLoggerProvider 
loggerProvider = + SdkLoggerProvider.builder() + // Disable loggerB. Since loggers are enabled by default, loggerA and loggerC are + // enabled. + .addLoggerConfiguratorCondition(nameEquals("loggerB"), LoggerConfig.disabled()) + .addLogRecordProcessor(SimpleLogRecordProcessor.create(exporter)) + .build(); + + Logger loggerA = loggerProvider.get("loggerA"); + Logger loggerB = loggerProvider.get("loggerB"); + Logger loggerC = loggerProvider.get("loggerC"); + + loggerA.logRecordBuilder().setBody("messageA").emit(); + loggerB.logRecordBuilder().setBody("messageB").emit(); + loggerC.logRecordBuilder().setBody("messageC").emit(); + + // Only logs from loggerA and loggerC should be seen + assertThat(exporter.getFinishedLogRecordItems()) + .satisfies( + metrics -> { + Map> logsByScope = + metrics.stream() + .collect(Collectors.groupingBy(LogRecordData::getInstrumentationScopeInfo)); + assertThat(logsByScope.get(InstrumentationScopeInfo.create("loggerA"))).hasSize(1); + assertThat(logsByScope.get(InstrumentationScopeInfo.create("loggerB"))).isNull(); + assertThat(logsByScope.get(InstrumentationScopeInfo.create("loggerC"))).hasSize(1); + }); + // loggerA and loggerC are enabled, loggerB is disabled. + assertThat(((ExtendedLogger) loggerA).isEnabled()).isTrue(); + assertThat(((ExtendedLogger) loggerB).isEnabled()).isFalse(); + assertThat(((ExtendedLogger) loggerC).isEnabled()).isTrue(); + } + + @ParameterizedTest + @MethodSource("loggerConfiguratorArgs") + void loggerConfigurator( + ScopeConfigurator loggerConfigurator, + InstrumentationScopeInfo scope, + LoggerConfig expectedLoggerConfig) { + LoggerConfig loggerConfig = loggerConfigurator.apply(scope); + loggerConfig = loggerConfig == null ? defaultConfig() : loggerConfig; + assertThat(loggerConfig).isEqualTo(expectedLoggerConfig); + } + + private static final InstrumentationScopeInfo scopeCat = InstrumentationScopeInfo.create("cat"); + private static final InstrumentationScopeInfo scopeDog = InstrumentationScopeInfo.create("dog"); + private static final InstrumentationScopeInfo scopeDuck = InstrumentationScopeInfo.create("duck"); + + private static Stream loggerConfiguratorArgs() { + ScopeConfigurator defaultConfigurator = + LoggerConfig.configuratorBuilder().build(); + ScopeConfigurator disableCat = + LoggerConfig.configuratorBuilder() + .addCondition(nameEquals("cat"), LoggerConfig.disabled()) + // Second matching rule for cat should be ignored + .addCondition(nameEquals("cat"), enabled()) + .build(); + ScopeConfigurator disableStartsWithD = + LoggerConfig.configuratorBuilder() + .addCondition(nameMatchesGlob("d*"), LoggerConfig.disabled()) + .build(); + ScopeConfigurator enableCat = + LoggerConfig.configuratorBuilder() + .setDefault(LoggerConfig.disabled()) + .addCondition(nameEquals("cat"), enabled()) + // Second matching rule for cat should be ignored + .addCondition(nameEquals("cat"), LoggerConfig.disabled()) + .build(); + ScopeConfigurator enableStartsWithD = + LoggerConfig.configuratorBuilder() + .setDefault(LoggerConfig.disabled()) + .addCondition(nameMatchesGlob("d*"), LoggerConfig.enabled()) + .build(); + + return Stream.of( + // default + Arguments.of(defaultConfigurator, scopeCat, defaultConfig()), + Arguments.of(defaultConfigurator, scopeDog, defaultConfig()), + Arguments.of(defaultConfigurator, scopeDuck, defaultConfig()), + // default enabled, disable cat + Arguments.of(disableCat, scopeCat, LoggerConfig.disabled()), + Arguments.of(disableCat, scopeDog, enabled()), + Arguments.of(disableCat, scopeDuck, enabled()), + // default 
enabled, disable pattern + Arguments.of(disableStartsWithD, scopeCat, enabled()), + Arguments.of(disableStartsWithD, scopeDog, LoggerConfig.disabled()), + Arguments.of(disableStartsWithD, scopeDuck, LoggerConfig.disabled()), + // default disabled, enable cat + Arguments.of(enableCat, scopeCat, enabled()), + Arguments.of(enableCat, scopeDog, LoggerConfig.disabled()), + Arguments.of(enableCat, scopeDuck, LoggerConfig.disabled()), + // default disabled, enable pattern + Arguments.of(enableStartsWithD, scopeCat, LoggerConfig.disabled()), + Arguments.of(enableStartsWithD, scopeDog, enabled()), + Arguments.of(enableStartsWithD, scopeDuck, enabled())); + } +} diff --git a/sdk/metrics/build.gradle.kts b/sdk/metrics/build.gradle.kts index 326f1750a61..a0b667943c8 100644 --- a/sdk/metrics/build.gradle.kts +++ b/sdk/metrics/build.gradle.kts @@ -15,7 +15,7 @@ otelJava.moduleName.set("io.opentelemetry.sdk.metrics") dependencies { api(project(":api:all")) api(project(":sdk:common")) - implementation(project(":extensions:incubator")) + compileOnly(project(":api:incubator")) compileOnly("org.codehaus.mojo:animal-sniffer-annotations") @@ -25,13 +25,25 @@ dependencies { testImplementation(project(":sdk:testing")) testImplementation("com.google.guava:guava") + testImplementation("com.google.guava:guava-testlib") jmh(project(":sdk:trace")) jmh(project(":sdk:testing")) } +dependencyCheck { + skipConfigurations.add("debugEnabledTestAnnotationProcessor") +} + testing { suites { + register("testIncubating") { + dependencies { + implementation(project(":sdk:testing")) + implementation(project(":api:incubator")) + implementation("com.google.guava:guava") + } + } register("debugEnabledTest") { targets { all { @@ -41,6 +53,13 @@ testing { } } } + register("jmhBasedTest") { + dependencies { + implementation("org.openjdk.jmh:jmh-core") + implementation("org.openjdk.jmh:jmh-generator-bytecode") + annotationProcessor("org.openjdk.jmh:jmh-generator-annprocess") + } + } } } diff --git a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/ExemplarClockBenchmarks.java b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/ExemplarClockBenchmarks.java new file mode 100644 index 00000000000..28d0c855134 --- /dev/null +++ b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/ExemplarClockBenchmarks.java @@ -0,0 +1,44 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.sdk.common.Clock; +import java.util.concurrent.TimeUnit; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Warmup; + +/** + * {@code io.opentelemetry.sdk.metrics.internal.exemplar.ReservoirCell} relies on {@link Clock} to + * obtain the measurement time when storing exemplar values. This benchmark illustrates the + * performance impact of using the higher precision {@link Clock#now()} instead of {@link + * Clock#now(boolean)} with {@code highPrecision=false}. 
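The disableScopes test above drives the experimental addLoggerConfiguratorCondition builder method directly (it lives in the same package); code that only sees the stable SdkLoggerProviderBuilder can get the same behavior through the reflective SdkLoggerProviderUtil introduced earlier in this diff. A minimal sketch, with an illustrative scope name:

    import io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder;
    import io.opentelemetry.sdk.logs.SdkLoggerProvider;
    import io.opentelemetry.sdk.logs.SdkLoggerProviderBuilder;
    import io.opentelemetry.sdk.logs.internal.LoggerConfig;
    import io.opentelemetry.sdk.logs.internal.SdkLoggerProviderUtil;

    class DisableLoggerSketch {
      static SdkLoggerProvider build() {
        SdkLoggerProviderBuilder builder = SdkLoggerProvider.builder();
        // Loggers are enabled by default; this condition disables only the scope
        // named "noisy-library" (illustrative name), every other logger stays enabled.
        SdkLoggerProviderUtil.addLoggerConfiguratorCondition(
            builder, ScopeConfiguratorBuilder.nameEquals("noisy-library"), LoggerConfig.disabled());
        return builder.build();
      }
    }
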
+ */ +@BenchmarkMode({Mode.AverageTime}) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@Warmup(iterations = 5, time = 1) +@Measurement(iterations = 10, time = 1) +@Fork(1) +public class ExemplarClockBenchmarks { + + private static final Clock clock = Clock.getDefault(); + + @SuppressWarnings("ReturnValueIgnored") + @Benchmark + public void now_lowPrecision() { + clock.now(false); + } + + @SuppressWarnings("ReturnValueIgnored") + @Benchmark + public void now_highPrecision() { + clock.now(true); + } +} diff --git a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/MetricAdviceBenchmark.java b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/MetricAdviceBenchmark.java new file mode 100644 index 00000000000..66c1a830607 --- /dev/null +++ b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/MetricAdviceBenchmark.java @@ -0,0 +1,273 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.incubator.metrics.ExtendedLongCounterBuilder; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; + +@BenchmarkMode({Mode.AverageTime}) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@Warmup(iterations = 5, time = 1) +@Measurement(iterations = 10, time = 1) +@Fork(1) +public class MetricAdviceBenchmark { + + static final AttributeKey HTTP_REQUEST_METHOD = + AttributeKey.stringKey("http.request.method"); + static final AttributeKey URL_PATH = AttributeKey.stringKey("url.path"); + static final AttributeKey URL_SCHEME = AttributeKey.stringKey("url.scheme"); + static final AttributeKey HTTP_RESPONSE_STATUS_CODE = + AttributeKey.longKey("http.response.status_code"); + static final AttributeKey HTTP_ROUTE = AttributeKey.stringKey("http.route"); + static final AttributeKey NETWORK_PROTOCOL_NAME = + AttributeKey.stringKey("network.protocol.name"); + static final AttributeKey SERVER_PORT = AttributeKey.longKey("server.port"); + static final AttributeKey URL_QUERY = AttributeKey.stringKey("url.query"); + static final AttributeKey CLIENT_ADDRESS = AttributeKey.stringKey("client.address"); + static final AttributeKey NETWORK_PEER_ADDRESS = + AttributeKey.stringKey("network.peer.address"); + static final AttributeKey NETWORK_PEER_PORT = AttributeKey.longKey("network.peer.port"); + static final AttributeKey NETWORK_PROTOCOL_VERSION = + AttributeKey.stringKey("network.protocol.version"); + static final AttributeKey SERVER_ADDRESS = AttributeKey.stringKey("server.address"); + static final AttributeKey USER_AGENT_ORIGINAL = + AttributeKey.stringKey("user_agent.original"); + + static final List> 
httpServerMetricAttributeKeys = + Arrays.asList( + HTTP_REQUEST_METHOD, + URL_SCHEME, + HTTP_RESPONSE_STATUS_CODE, + HTTP_ROUTE, + NETWORK_PROTOCOL_NAME, + SERVER_PORT, + NETWORK_PROTOCOL_VERSION, + SERVER_ADDRESS); + + static Attributes httpServerMetricAttributes() { + return Attributes.builder() + .put(HTTP_REQUEST_METHOD, "GET") + .put(URL_SCHEME, "http") + .put(HTTP_RESPONSE_STATUS_CODE, 200) + .put(HTTP_ROUTE, "/v1/users/{id}") + .put(NETWORK_PROTOCOL_NAME, "http") + .put(SERVER_PORT, 8080) + .put(NETWORK_PROTOCOL_VERSION, "1.1") + .put(SERVER_ADDRESS, "localhost") + .build(); + } + + static Attributes httpServerSpanAttributes() { + return Attributes.builder() + .put(HTTP_REQUEST_METHOD, "GET") + .put(URL_PATH, "/v1/users/123") + .put(URL_SCHEME, "http") + .put(HTTP_RESPONSE_STATUS_CODE, 200) + .put(HTTP_ROUTE, "/v1/users/{id}") + .put(NETWORK_PROTOCOL_NAME, "http") + .put(SERVER_PORT, 8080) + .put(URL_QUERY, "with=email") + .put(CLIENT_ADDRESS, "192.168.0.17") + .put(NETWORK_PEER_ADDRESS, "192.168.0.17") + .put(NETWORK_PEER_PORT, 11265) + .put(NETWORK_PROTOCOL_VERSION, "1.1") + .put(SERVER_ADDRESS, "localhost") + .put(USER_AGENT_ORIGINAL, "okhttp/1.27.2") + .build(); + } + + static final Attributes CACHED_HTTP_SERVER_SPAN_ATTRIBUTES = httpServerSpanAttributes(); + + @State(Scope.Benchmark) + public static class ThreadState { + + @Param InstrumentParam instrumentParam; + + SdkMeterProvider meterProvider; + + @Setup(Level.Iteration) + public void setup() { + meterProvider = + SdkMeterProvider.builder() + .registerMetricReader(InMemoryMetricReader.createDelta()) + .build(); + Meter meter = meterProvider.get("meter"); + instrumentParam.instrument().setup(meter); + } + + @TearDown + public void tearDown() { + meterProvider.shutdown().join(10, TimeUnit.SECONDS); + } + } + + @Benchmark + @Threads(1) + public void record(ThreadState threadState) { + threadState.instrumentParam.instrument().record(1); + } + + @SuppressWarnings("ImmutableEnumChecker") + public enum InstrumentParam { + /** + * Record HTTP span attributes without advice. This baseline shows the CPU and memory allocation + * independent of advice. + */ + NO_ADVICE_ALL_ATTRIBUTES( + new Instrument() { + private LongCounter counter; + + @Override + void setup(Meter meter) { + counter = ((ExtendedLongCounterBuilder) meter.counterBuilder("counter")).build(); + } + + @Override + void record(long value) { + counter.add(value, httpServerSpanAttributes()); + } + }), + /** + * Record HTTP metric attributes without advice. This baseline shows the lower bound if + * attribute filtering was done in instrumentation instead of the metrics SDK with advice. It's + * not quite fair though because instrumentation would have to separately allocate attributes + * for spans and metrics, whereas with advice, we can manage to only allocate span attributes + * and a lightweight metrics attributes view derived from span attributes. + */ + NO_ADVICE_FILTERED_ATTRIBUTES( + new Instrument() { + private LongCounter counter; + + @Override + void setup(Meter meter) { + counter = ((ExtendedLongCounterBuilder) meter.counterBuilder("counter")).build(); + } + + @Override + void record(long value) { + counter.add(value, httpServerMetricAttributes()); + } + }), + /** + * Record cached HTTP span attributes without advice. This baseline helps isolate the CPU and + * memory allocation for recording vs. creating attributes. 
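Outside the benchmark harness, the observable effect of the attribute advice being measured here can be sketched as follows: the SDK keeps only the advised keys on the recorded metric points, so instrumentation can keep handing over the richer span-shaped attribute set. The keys below are a subset of the ones defined above, and InMemoryMetricReader comes from sdk-testing; treat this as an illustrative sketch rather than code from the change.

    import io.opentelemetry.api.common.AttributeKey;
    import io.opentelemetry.api.common.Attributes;
    import io.opentelemetry.api.incubator.metrics.ExtendedLongCounterBuilder;
    import io.opentelemetry.api.metrics.LongCounter;
    import io.opentelemetry.api.metrics.Meter;
    import io.opentelemetry.sdk.metrics.SdkMeterProvider;
    import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
    import java.util.Arrays;
    import java.util.List;

    class AdviceEffectSketch {
      static void example() {
        InMemoryMetricReader reader = InMemoryMetricReader.createDelta();
        SdkMeterProvider meterProvider =
            SdkMeterProvider.builder().registerMetricReader(reader).build();
        Meter meter = meterProvider.get("meter");

        AttributeKey<String> httpRoute = AttributeKey.stringKey("http.route");
        AttributeKey<String> urlPath = AttributeKey.stringKey("url.path");
        List<AttributeKey<?>> advisedKeys = Arrays.asList(httpRoute);

        // Advice asks the SDK to retain only http.route on the metric stream.
        LongCounter counter =
            ((ExtendedLongCounterBuilder) meter.counterBuilder("counter"))
                .setAttributesAdvice(advisedKeys)
                .build();

        // The caller still records the full attribute set; url.path is dropped by advice.
        counter.add(1, Attributes.of(httpRoute, "/v1/users/{id}", urlPath, "/v1/users/123"));

        // Collected points carry only the advised http.route attribute.
        reader.collectAllMetrics();
      }
    }
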
+ */ + NO_ADVICE_ALL_ATTRIBUTES_CACHED( + new Instrument() { + private LongCounter counter; + + @Override + void setup(Meter meter) { + counter = ((ExtendedLongCounterBuilder) meter.counterBuilder("counter")).build(); + } + + @Override + void record(long value) { + counter.add(value, CACHED_HTTP_SERVER_SPAN_ATTRIBUTES); + } + }), + /** + * Record HTTP span attributes with advice filtering to HTTP metric attributes. This is meant to + * realistically demonstrate a typical HTTP server instrumentation scenario. + */ + ADVICE_ALL_ATTRIBUTES( + new Instrument() { + private LongCounter counter; + + @Override + void setup(Meter meter) { + counter = + ((ExtendedLongCounterBuilder) meter.counterBuilder("counter")) + .setAttributesAdvice(httpServerMetricAttributeKeys) + .build(); + } + + @Override + void record(long value) { + counter.add(value, httpServerSpanAttributes()); + } + }), + /** + * Record HTTP metric attributes with advice filtering to HTTP metric attributes. This + * demonstrates the overhead of advice when no attributes are filtered. + */ + ADVICE_FILTERED_ATTRIBUTES( + new Instrument() { + private LongCounter counter; + + @Override + void setup(Meter meter) { + counter = + ((ExtendedLongCounterBuilder) meter.counterBuilder("counter")) + .setAttributesAdvice(httpServerMetricAttributeKeys) + .build(); + } + + @Override + void record(long value) { + counter.add(value, httpServerMetricAttributes()); + } + }), + /** + * Record cached HTTP span attributes with advice filtering to HTTP metric attributes. This + * isolates the CPU and memory allocation for applying advice vs. creating attributes. + */ + ADVICE_ALL_ATTRIBUTES_CACHED( + new Instrument() { + private LongCounter counter; + + @Override + void setup(Meter meter) { + counter = + ((ExtendedLongCounterBuilder) meter.counterBuilder("counter")) + .setAttributesAdvice(httpServerMetricAttributeKeys) + .build(); + } + + @Override + void record(long value) { + counter.add(value, CACHED_HTTP_SERVER_SPAN_ATTRIBUTES); + } + }); + + private final Instrument instrument; + + InstrumentParam(Instrument instrument) { + this.instrument = instrument; + } + + Instrument instrument() { + return instrument; + } + } + + private abstract static class Instrument { + abstract void setup(Meter meter); + + abstract void record(long value); + } +} diff --git a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/MetricsBenchmarks.java b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/MetricsBenchmarks.java index a77c3200d28..c181f5dd1ba 100644 --- a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/MetricsBenchmarks.java +++ b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/MetricsBenchmarks.java @@ -51,7 +51,7 @@ public class MetricsBenchmarks { } } - @State(Scope.Thread) + @State(Scope.Benchmark) public static class ThreadState { @Param TestSdk sdk; @@ -78,7 +78,7 @@ public void setup(ThreadParams threadParams) { } @TearDown - public void tearDown(ThreadParams threadParams) { + public void tearDown() { contextScope.close(); span.end(); } diff --git a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/TestSdk.java b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/TestSdk.java index 4f461d6fa63..1c8e760425d 100644 --- a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/TestSdk.java +++ b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/TestSdk.java @@ -38,7 +38,7 @@ Meter build() { .get("io.opentelemetry.sdk.metrics"); } }), - SDK( + SDK_CUMULATIVE( new SdkBuilder() { @Override Meter build() { @@ -50,6 +50,19 @@ Meter build() 
{ .build() .get("io.opentelemetry.sdk.metrics"); } + }), + SDK_DELTA( + new SdkBuilder() { + @Override + Meter build() { + return SdkMeterProvider.builder() + .setClock(Clock.getDefault()) + .setResource(Resource.empty()) + // Must register reader for real SDK. + .registerMetricReader(InMemoryMetricReader.createDelta()) + .build() + .get("io.opentelemetry.sdk.metrics"); + } }); private final SdkBuilder sdkBuilder; diff --git a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialHistogramIndexerBenchmark.java b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialHistogramIndexerBenchmark.java index 66c3d579e4f..304087f734f 100644 --- a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialHistogramIndexerBenchmark.java +++ b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExponentialHistogramIndexerBenchmark.java @@ -21,7 +21,12 @@ import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; -/** Measures runtime cost of computing bucket indexes for exponential histograms. */ +/** + * Measures runtime cost of computing bucket indexes for exponential histograms. + * + *

This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) @Measurement(iterations = 5, time = 1) diff --git a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramAggregationParam.java b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramAggregationParam.java index ddece4dbee2..162794f52d8 100644 --- a/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramAggregationParam.java +++ b/sdk/metrics/src/jmh/java/io/opentelemetry/sdk/metrics/internal/aggregator/HistogramAggregationParam.java @@ -5,6 +5,8 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; +import static io.opentelemetry.sdk.common.export.MemoryMode.IMMUTABLE_DATA; + import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import java.util.Collections; @@ -15,15 +17,19 @@ public enum HistogramAggregationParam { new DoubleExplicitBucketHistogramAggregator( ExplicitBucketHistogramUtils.createBoundaryArray( ExplicitBucketHistogramUtils.DEFAULT_HISTOGRAM_BUCKET_BOUNDARIES), - ExemplarReservoir::doubleNoSamples)), + ExemplarReservoir::doubleNoSamples, + IMMUTABLE_DATA)), EXPLICIT_SINGLE_BUCKET( new DoubleExplicitBucketHistogramAggregator( ExplicitBucketHistogramUtils.createBoundaryArray(Collections.emptyList()), - ExemplarReservoir::doubleNoSamples)), + ExemplarReservoir::doubleNoSamples, + IMMUTABLE_DATA)), EXPONENTIAL_SMALL_CIRCULAR_BUFFER( - new DoubleBase2ExponentialHistogramAggregator(ExemplarReservoir::doubleNoSamples, 20, 0)), + new DoubleBase2ExponentialHistogramAggregator( + ExemplarReservoir::doubleNoSamples, 20, 0, IMMUTABLE_DATA)), EXPONENTIAL_CIRCULAR_BUFFER( - new DoubleBase2ExponentialHistogramAggregator(ExemplarReservoir::doubleNoSamples, 160, 0)); + new DoubleBase2ExponentialHistogramAggregator( + ExemplarReservoir::doubleNoSamples, 160, 0, IMMUTABLE_DATA)); private final Aggregator aggregator; diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/AttributesGenerator.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/AttributesGenerator.java new file mode 100644 index 00000000000..3aea1dbd6d6 --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/AttributesGenerator.java @@ -0,0 +1,45 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import io.opentelemetry.api.common.Attributes; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Random; + +public class AttributesGenerator { + + private AttributesGenerator() {} + + /** + * Generates a list of unique attributes, with a single attribute key, and random value. 
+ * + * @param uniqueAttributesCount The amount of unique attribute sets to generate + * @return The list of generates {@link Attributes} + */ + public static List generate(int uniqueAttributesCount) { + Random random = new Random(); + HashSet attributeSet = new HashSet<>(); + ArrayList attributesList = new ArrayList<>(uniqueAttributesCount); + String last = "aaaaaaaaaaaaaaaaaaaaaaaaaa"; + for (int i = 0; i < uniqueAttributesCount; i++) { + char[] chars = last.toCharArray(); + int attempts = 0; + do { + chars[random.nextInt(last.length())] = (char) (random.nextInt(26) + 'a'); + } while (attributeSet.contains(new String(chars)) && ++attempts < 1000); + if (attributeSet.contains(new String(chars))) { + throw new IllegalStateException("Couldn't create new random attributes"); + } + last = new String(chars); + attributesList.add(Attributes.builder().put("key", last).build()); + attributeSet.add(last); + } + + return attributesList; + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/InstrumentGarbageCollectionBenchmark.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/InstrumentGarbageCollectionBenchmark.java new file mode 100644 index 00000000000..9e761d6b2bb --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/InstrumentGarbageCollectionBenchmark.java @@ -0,0 +1,125 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; +import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType.InstrumentTester; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType.TestInstrumentsState; +import java.time.Duration; +import java.util.List; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; + +/** + * Run this through {@link InstrumentGarbageCollectionBenchmarkTest}, as it runs it embedded with + * the GC profiler which what this test designed for (No need for command line run) + * + *

This test creates 10 asynchronous counters (any asynchronous instrument will do as the code + * path is almost the same for all async instrument types), and 1000 attribute sets. Each time the + * test runs, it calls `flush` which effectively calls the callback for each counter. Each such + * callback records a random number for each of the 1000 attribute sets. The result list ends up in + * {@link NoopMetricExporter} which does nothing with it. + * + *
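 + * <p>(That is, each flush drives 10 callbacks, each recording a value for 1,000 attribute sets: 10 * 1,000 = 10,000 measurements per flush.)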

This is repeated 100 times, collectively called Operation in the statistics and each such + * operation is repeated 20 times - known as Iteration. + * + *

Each such test is repeated, with a brand new JVM, for all combinations of {@link MemoryMode} + * and {@link AggregationTemporality}. This is done since each combination has a different code + * path. + */ +@BenchmarkMode(Mode.SingleShotTime) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@Measurement(iterations = 10, batchSize = 10) +@Warmup(iterations = 10, batchSize = 10) +@Fork(1) +public class InstrumentGarbageCollectionBenchmark { + + @State(value = Scope.Benchmark) + public static class ThreadState { + private final int cardinality; + private final int instrumentCount; + @Param public TestInstrumentType testInstrumentType; + @Param public AggregationTemporality aggregationTemporality; + @Param public MemoryMode memoryMode; + SdkMeterProvider sdkMeterProvider; + private final Random random = new Random(); + List attributesList; + private TestInstrumentsState testInstrumentsState; + private InstrumentTester instrumentTester; + + /** Creates a ThreadState. */ + @SuppressWarnings("unused") + public ThreadState() { + cardinality = 1000; + instrumentCount = 10; + } + + @SuppressWarnings("SpellCheckingInspection") + @Setup + public void setup() { + instrumentTester = testInstrumentType.createInstrumentTester(); + PeriodicMetricReader metricReader = + PeriodicMetricReader.builder( + // Configure an exporter that configures the temporality and aggregation + // for the test case, but otherwise drops the data on export + new NoopMetricExporter( + aggregationTemporality, instrumentTester.testedAggregation(), memoryMode)) + // Effectively disable periodic reading so reading is only done on #flush() + .setInterval(Duration.ofSeconds(Integer.MAX_VALUE)) + .build(); + SdkMeterProviderBuilder builder = + SdkMeterProvider.builder().registerMetricReader(metricReader, unused -> cardinality + 1); + + attributesList = AttributesGenerator.generate(cardinality); + + // Disable examplars + SdkMeterProviderUtil.setExemplarFilter(builder, ExemplarFilter.alwaysOff()); + + sdkMeterProvider = builder.build(); + testInstrumentsState = + instrumentTester.buildInstruments( + instrumentCount, sdkMeterProvider, attributesList, random); + } + + @TearDown + public void tearDown() { + sdkMeterProvider.shutdown().join(10, TimeUnit.SECONDS); + } + } + + /** + * Collects all asynchronous instruments metric data. 
+ * + * @param threadState thread-state + */ + @Benchmark + @Threads(value = 1) + public void recordAndCollect(ThreadState threadState) { + threadState.instrumentTester.recordValuesInInstruments( + threadState.testInstrumentsState, threadState.attributesList, threadState.random); + threadState.sdkMeterProvider.forceFlush().join(10, TimeUnit.SECONDS); + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/InstrumentGarbageCollectionBenchmarkTest.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/InstrumentGarbageCollectionBenchmarkTest.java new file mode 100644 index 00000000000..6fba53e86cd --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/InstrumentGarbageCollectionBenchmarkTest.java @@ -0,0 +1,134 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.resources.Resource; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import org.assertj.core.data.Offset; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.Test; +import org.openjdk.jmh.infra.BenchmarkParams; +import org.openjdk.jmh.results.BenchmarkResult; +import org.openjdk.jmh.results.Result; +import org.openjdk.jmh.results.RunResult; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +public class InstrumentGarbageCollectionBenchmarkTest { + + /** + * This test validates that in {@link MemoryMode#REUSABLE_DATA}, any {@link + * MetricStorage#collect(Resource, InstrumentationScopeInfo, long, long)} barely allocates memory + * which is then subsequently garbage collected. It is done so comparatively to {@link + * MemoryMode#IMMUTABLE_DATA}, + * + *

It runs the JMH test {@link InstrumentGarbageCollectionBenchmark} with GC profiler, and + * measures for each parameter combination the garbage collector normalized rate (bytes allocated + * per Operation). + * + *
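 + * <p>(For example, with hypothetical figures, an immutable-mode rate of 1,000,000 bytes/op against a reusable-mode rate of 27,000 bytes/op gives a reduction of 100 - (27,000 / 1,000,000) * 100 = 97.3%, which is the level expected below for the sum and last-value instruments.)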

Memory allocations can be hidden even at an innocent foreach loop on a collection, which + * under the hood allocates an internal object O(N) times. Someone can accidentally refactor such + * loop, resulting in 30% increase of garbage collected objects during a single collect() run. + */ + @SuppressWarnings("rawtypes") + @Test + public void normalizedAllocationRateTest() throws RunnerException { + // OTel GitHub CI Workflow (see .github/) sets an environment variable + // (RUN_JMH_BASED_TESTS=true). + // We set it only there since it's a lengthy test (roughly 2.5min) + // and we want to run it only in CI. + Assumptions.assumeTrue( + "true".equals(System.getenv("RUN_JMH_BASED_TESTS")), + "This test should only run in GitHub CI since it's long"); + + // Runs InstrumentGarbageCollectionBenchmark + // with garbage collection profiler + Options opt = + new OptionsBuilder() + .include(InstrumentGarbageCollectionBenchmark.class.getSimpleName()) + .addProfiler("gc") + .shouldFailOnError(true) + .jvmArgs("-Xmx1500m") + .build(); + Collection results = new Runner(opt).run(); + + // Collect the normalized GC allocation rate per parameters combination + Map testInstrumentTypeResultsMap = new HashMap<>(); + for (RunResult result : results) { + for (BenchmarkResult benchmarkResult : result.getBenchmarkResults()) { + BenchmarkParams benchmarkParams = benchmarkResult.getParams(); + + String memoryMode = benchmarkParams.getParam("memoryMode"); + String aggregationTemporality = benchmarkParams.getParam("aggregationTemporality"); + String testInstrumentType = benchmarkParams.getParam("testInstrumentType"); + assertThat(memoryMode).isNotNull(); + assertThat(aggregationTemporality).isNotNull(); + assertThat(testInstrumentType).isNotNull(); + + Map secondaryResults = benchmarkResult.getSecondaryResults(); + Result allocRateNorm = secondaryResults.get("gc.alloc.rate.norm"); + assertThat(allocRateNorm) + .describedAs("Allocation rate in secondary results: %s", secondaryResults) + .isNotNull(); + + testInstrumentTypeResultsMap + .computeIfAbsent(testInstrumentType, k -> new TestInstrumentTypeResults()) + .aggregationTemporalityToMemoryModeResult + .computeIfAbsent(aggregationTemporality, k -> new HashMap<>()) + .put(memoryMode, allocRateNorm.getScore()); + } + } + + testInstrumentTypeResultsMap.forEach( + (testInstrumentTypeString, testInstrumentTypeResults) -> { + Map> resultMap = + testInstrumentTypeResults.aggregationTemporalityToMemoryModeResult; + assertThat(resultMap).hasSameSizeAs(AggregationTemporality.values()); + + // Asserts that reusable data GC allocation rate is a tiny fraction of immutable data + // GC allocation rate + resultMap.forEach( + (aggregationTemporality, memoryModeToAllocRateMap) -> { + Double immutableDataAllocRate = + memoryModeToAllocRateMap.get(MemoryMode.IMMUTABLE_DATA.toString()); + Double reusableDataAllocRate = + memoryModeToAllocRateMap.get(MemoryMode.REUSABLE_DATA.toString()); + + assertThat(immutableDataAllocRate).isNotNull().isNotZero(); + assertThat(reusableDataAllocRate).isNotNull().isNotZero(); + + TestInstrumentType testInstrumentType = + TestInstrumentType.valueOf(testInstrumentTypeString); + float dataAllocRateReductionPercentage = + testInstrumentType.getDataAllocRateReductionPercentage(); + double allowedOffset = testInstrumentType.getAllowedPercentOffset(); + + // If this test suddenly fails for you this means you have changed the code in a way + // that allocates more memory than before. 
You can find out where, by running + // ProfileBenchmark class and looking at the flame graph. Make sure to + // set the parameters according to where it failed for. + assertThat(100 - (reusableDataAllocRate / immutableDataAllocRate) * 100) + .describedAs( + "Aggregation temporality = %s, testInstrumentType = %s", + aggregationTemporality, testInstrumentTypeString) + .isCloseTo(dataAllocRateReductionPercentage, Offset.offset(allowedOffset)); + }); + }); + } + + static class TestInstrumentTypeResults { + Map> aggregationTemporalityToMemoryModeResult = new HashMap<>(); + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/NoopMetricExporter.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/NoopMetricExporter.java new file mode 100644 index 00000000000..533a3cc34dc --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/NoopMetricExporter.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import java.util.Collection; + +public class NoopMetricExporter implements MetricExporter { + private final AggregationTemporality aggregationTemporality; + private final Aggregation aggregation; + private final MemoryMode memoryMode; + + /** + * Create a {@link NoopMetricExporter} with aggregationTemporality, aggregation and memory mode. 
+ */ + public NoopMetricExporter( + AggregationTemporality aggregationTemporality, + Aggregation aggregation, + MemoryMode memoryMode) { + this.aggregationTemporality = aggregationTemporality; + this.aggregation = aggregation; + this.memoryMode = memoryMode; + } + + @Override + public CompletableResultCode export(Collection metrics) { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public Aggregation getDefaultAggregation(InstrumentType instrumentType) { + return aggregation; + } + + @Override + public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { + return aggregationTemporality; + } + + @Override + public MemoryMode getMemoryMode() { + return memoryMode; + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/ProfileBenchmark.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/ProfileBenchmark.java new file mode 100644 index 00000000000..2d091bfd725 --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/ProfileBenchmark.java @@ -0,0 +1,75 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; + +/** + * This benchmark class is used to see memory allocation flame graphs for a single run. + * + *

Steps:
 + *
 + * 1. Follow download instructions for async-profiler, located at this location
 + * 2. Assuming you have extracted it at /tmp/async-profiler-2.9-macos, add the following to your
 + *    JVM arguments of your run configuration:
 + *    -agentpath:/tmp/async-profiler-2.9-macos/build/libasyncProfiler.so=start,event=alloc,flamegraph,file=/tmp/profiled_data.html
 + * 3. Tune the parameters as you see fit (They are marked below with "Parameters")
 + * 4. Run the class (its main function)
 + * 5. Open /tmp/profiled_data.html with your browser
 + * 6. Use the flame graph to see where the allocations are happening the most and fix
 + * 7. Run {@link InstrumentGarbageCollectionBenchmark} and see if it passes now
 + * 8. If not, repeat
+ */ +public class ProfileBenchmark { + + private ProfileBenchmark() {} + + public static void main(String[] args) { + // Parameters + AggregationTemporality aggregationTemporality = AggregationTemporality.DELTA; + MemoryMode memoryMode = MemoryMode.REUSABLE_DATA; + TestInstrumentType testInstrumentType = TestInstrumentType.DOUBLE_LAST_VALUE; + + InstrumentGarbageCollectionBenchmark.ThreadState benchmarkSetup = + new InstrumentGarbageCollectionBenchmark.ThreadState(); + + benchmarkSetup.aggregationTemporality = aggregationTemporality; + benchmarkSetup.memoryMode = memoryMode; + benchmarkSetup.testInstrumentType = testInstrumentType; + + InstrumentGarbageCollectionBenchmark benchmark = new InstrumentGarbageCollectionBenchmark(); + + benchmarkSetup.setup(); + + warmup(benchmark, benchmarkSetup); + + // This is divided explicitly to two methods so you can focus on `measure` in the flame graph + // when trying to decrease the allocations + measure(benchmark, benchmarkSetup); + } + + public static void warmup( + InstrumentGarbageCollectionBenchmark benchmark, + InstrumentGarbageCollectionBenchmark.ThreadState benchmarkSetup) { + for (int i = 0; i < 10; i++) { + benchmark.recordAndCollect(benchmarkSetup); + } + } + + public static void measure( + InstrumentGarbageCollectionBenchmark benchmark, + InstrumentGarbageCollectionBenchmark.ThreadState benchmarkSetup) { + for (int i = 0; i < 200; i++) { + benchmark.recordAndCollect(benchmarkSetup); + } + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/TestInstrumentType.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/TestInstrumentType.java new file mode 100644 index 00000000000..5a462fd94c8 --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/TestInstrumentType.java @@ -0,0 +1,93 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.internal.state.tester.AsyncCounterTester; +import io.opentelemetry.sdk.metrics.internal.state.tester.DoubleLastValueTester; +import io.opentelemetry.sdk.metrics.internal.state.tester.DoubleSumTester; +import io.opentelemetry.sdk.metrics.internal.state.tester.ExplicitBucketHistogramTester; +import io.opentelemetry.sdk.metrics.internal.state.tester.ExponentialHistogramTester; +import io.opentelemetry.sdk.metrics.internal.state.tester.LongLastValueTester; +import io.opentelemetry.sdk.metrics.internal.state.tester.LongSumTester; +import java.util.List; +import java.util.Random; +import java.util.function.Supplier; + +@SuppressWarnings("ImmutableEnumChecker") +public enum TestInstrumentType { + ASYNC_COUNTER(AsyncCounterTester::new), + EXPONENTIAL_HISTOGRAM(ExponentialHistogramTester::new), + EXPLICIT_BUCKET(ExplicitBucketHistogramTester::new), + LONG_SUM( + LongSumTester::new, + /* dataAllocRateReductionPercentage= */ 97.3f, + /* allowedPercentOffset= */ 4.0f), + DOUBLE_SUM( + DoubleSumTester::new, + /* dataAllocRateReductionPercentage= */ 97.3f, + /* allowedPercentOffset= */ 2.0f), + LONG_LAST_VALUE( + LongLastValueTester::new, + /* dataAllocRateReductionPercentage= */ 97.3f, + /* allowedPercentOffset= */ 4.0f), + DOUBLE_LAST_VALUE( + DoubleLastValueTester::new, + /* dataAllocRateReductionPercentage= */ 97.3f, + /* 
allowedPercentOffset= */ 4.0f); + + private final Supplier instrumentTesterInitializer; + private final float dataAllocRateReductionPercentage; + private final double allowedPercentOffset; + + @SuppressWarnings("unused") + TestInstrumentType(Supplier instrumentTesterInitializer) { + this.dataAllocRateReductionPercentage = 99.8f; // default + this.instrumentTesterInitializer = instrumentTesterInitializer; + this.allowedPercentOffset = 2.0f; + } + + // Some instruments have different reduction percentage. + TestInstrumentType( + Supplier instrumentTesterInitializer, + float dataAllocRateReductionPercentage, + float allowedPercentOffset) { + this.instrumentTesterInitializer = instrumentTesterInitializer; + this.dataAllocRateReductionPercentage = dataAllocRateReductionPercentage; + this.allowedPercentOffset = allowedPercentOffset; + } + + float getDataAllocRateReductionPercentage() { + return dataAllocRateReductionPercentage; + } + + public double getAllowedPercentOffset() { + return allowedPercentOffset; + } + + InstrumentTester createInstrumentTester() { + return instrumentTesterInitializer.get(); + } + + public interface InstrumentTester { + Aggregation testedAggregation(); + + TestInstrumentsState buildInstruments( + double instrumentCount, + SdkMeterProvider sdkMeterProvider, + List attributesList, + Random random); + + void recordValuesInInstruments( + TestInstrumentsState testInstrumentsState, List attributesList, Random random); + } + + public interface TestInstrumentsState {} + + public static class EmptyInstrumentsState implements TestInstrumentsState {} +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/AsyncCounterTester.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/AsyncCounterTester.java new file mode 100644 index 00000000000..f926fb343b6 --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/AsyncCounterTester.java @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state.tester; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType.EmptyInstrumentsState; +import java.util.List; +import java.util.Random; + +public class AsyncCounterTester implements TestInstrumentType.InstrumentTester { + @Override + public Aggregation testedAggregation() { + return Aggregation.sum(); + } + + @SuppressWarnings("ForLoopReplaceableByForEach") // This is for GC sensitivity testing: no streams + @Override + public TestInstrumentType.TestInstrumentsState buildInstruments( + double instrumentCount, + SdkMeterProvider sdkMeterProvider, + List attributesList, + Random random) { + for (int i = 0; i < instrumentCount; i++) { + sdkMeterProvider + .get("meter") + .counterBuilder("counter" + i) + .buildWithCallback( + observableLongMeasurement -> { + for (int j = 0; j < attributesList.size(); j++) { + Attributes attributes = attributesList.get(j); + observableLongMeasurement.record(random.nextInt(10_000), attributes); + } + }); + } + return new EmptyInstrumentsState(); + } + + @Override + public void recordValuesInInstruments( + TestInstrumentType.TestInstrumentsState testInstrumentsState, + List attributesList, + Random 
random) { + // No need, all done via the callbacks + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/DoubleLastValueTester.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/DoubleLastValueTester.java new file mode 100644 index 00000000000..c9144ad2c1a --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/DoubleLastValueTester.java @@ -0,0 +1,50 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state.tester; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType; +import java.util.List; +import java.util.Random; + +public class DoubleLastValueTester implements TestInstrumentType.InstrumentTester { + + @Override + public Aggregation testedAggregation() { + return Aggregation.lastValue(); + } + + @SuppressWarnings("ForLoopReplaceableByForEach") // This is for GC sensitivity testing: no streams + @Override + public TestInstrumentType.TestInstrumentsState buildInstruments( + double instrumentCount, + SdkMeterProvider sdkMeterProvider, + List attributesList, + Random random) { + Meter meter = sdkMeterProvider.meterBuilder("meter").build(); + meter + .gaugeBuilder("test.double.last.value") + .buildWithCallback( + observableDoubleMeasurement -> { + for (int j = 0; j < attributesList.size(); j++) { + observableDoubleMeasurement.record(1.2f, attributesList.get(j)); + } + }); + + return new TestInstrumentType.EmptyInstrumentsState(); + } + + @Override + public void recordValuesInInstruments( + TestInstrumentType.TestInstrumentsState testInstrumentsState, + List attributesList, + Random random) { + // Recording is done by the callback define above + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/DoubleSumTester.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/DoubleSumTester.java new file mode 100644 index 00000000000..47cf4e4023e --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/DoubleSumTester.java @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state.tester; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleCounter; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType; +import java.util.List; +import java.util.Random; + +public class DoubleSumTester implements TestInstrumentType.InstrumentTester { + private static final int measurementsPerAttributeSet = 1_000; + + static class DoubleSumState implements TestInstrumentType.TestInstrumentsState { + DoubleCounter doubleCounter; + } + + @Override + public Aggregation testedAggregation() { + return Aggregation.sum(); + } + + @Override + public TestInstrumentType.TestInstrumentsState buildInstruments( + double instrumentCount, + SdkMeterProvider sdkMeterProvider, + List attributesList, + Random random) { + DoubleSumState doubleSumState = new 
DoubleSumState(); + + Meter meter = sdkMeterProvider.meterBuilder("meter").build(); + doubleSumState.doubleCounter = meter.counterBuilder("test.double.sum").ofDoubles().build(); + + return doubleSumState; + } + + @SuppressWarnings("ForLoopReplaceableByForEach") // This is for GC sensitivity testing: no streams + @Override + public void recordValuesInInstruments( + TestInstrumentType.TestInstrumentsState testInstrumentsState, + List attributesList, + Random random) { + DoubleSumState state = (DoubleSumState) testInstrumentsState; + + for (int j = 0; j < attributesList.size(); j++) { + Attributes attributes = attributesList.get(j); + for (int i = 0; i < measurementsPerAttributeSet; i++) { + state.doubleCounter.add(1.2f, attributes); + } + } + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/ExplicitBucketHistogramTester.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/ExplicitBucketHistogramTester.java new file mode 100644 index 00000000000..64ef6ca970d --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/ExplicitBucketHistogramTester.java @@ -0,0 +1,62 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state.tester; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.internal.aggregator.ExplicitBucketHistogramUtils; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType.InstrumentTester; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType.TestInstrumentsState; +import java.util.List; +import java.util.Random; + +public class ExplicitBucketHistogramTester implements InstrumentTester { + + static class ExplicitHistogramState implements TestInstrumentsState { + public double maxBucketValue; + DoubleHistogram doubleHistogram; + } + + private static final int measurementsPerAttributeSet = 1_000; + + @Override + public Aggregation testedAggregation() { + return Aggregation.explicitBucketHistogram(); + } + + @Override + public TestInstrumentsState buildInstruments( + double instrumentCount, + SdkMeterProvider sdkMeterProvider, + List attributesList, + Random random) { + ExplicitHistogramState state = new ExplicitHistogramState(); + state.doubleHistogram = + sdkMeterProvider.get("meter").histogramBuilder("test.explicit.histogram").build(); + state.maxBucketValue = + ExplicitBucketHistogramUtils.DEFAULT_HISTOGRAM_BUCKET_BOUNDARIES.get( + ExplicitBucketHistogramUtils.DEFAULT_HISTOGRAM_BUCKET_BOUNDARIES.size() - 1); + return state; + } + + @SuppressWarnings("ForLoopReplaceableByForEach") // This is for GC sensitivity testing: no streams + @Override + public void recordValuesInInstruments( + TestInstrumentsState testInstrumentsState, List attributesList, Random random) { + + ExplicitHistogramState state = (ExplicitHistogramState) testInstrumentsState; + + for (int j = 0; j < attributesList.size(); j++) { + Attributes attributes = attributesList.get(j); + for (int i = 0; i < measurementsPerAttributeSet; i++) { + state.doubleHistogram.record( + random.nextInt(Double.valueOf(state.maxBucketValue * 1.1).intValue()), attributes); + } + } + } +} diff --git 
a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/ExponentialHistogramTester.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/ExponentialHistogramTester.java new file mode 100644 index 00000000000..6cca5a35bd4 --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/ExponentialHistogramTester.java @@ -0,0 +1,57 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state.tester; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType.InstrumentTester; +import java.util.List; +import java.util.Random; + +public class ExponentialHistogramTester implements InstrumentTester { + + static class ExponentialHistogramState implements TestInstrumentType.TestInstrumentsState { + DoubleHistogram doubleHistogram; + } + + private static final int measurementsPerAttributeSet = 1_000; + + @Override + public Aggregation testedAggregation() { + return Aggregation.base2ExponentialBucketHistogram(); + } + + @Override + public TestInstrumentType.TestInstrumentsState buildInstruments( + double instrumentCount, + SdkMeterProvider sdkMeterProvider, + List attributesList, + Random random) { + ExponentialHistogramState state = new ExponentialHistogramState(); + state.doubleHistogram = sdkMeterProvider.get("meter").histogramBuilder("testhistogram").build(); + return state; + } + + @SuppressWarnings("ForLoopReplaceableByForEach") // This is for GC sensitivity testing: no streams + @Override + public void recordValuesInInstruments( + TestInstrumentType.TestInstrumentsState testInstrumentsState, + List attributesList, + Random random) { + + ExponentialHistogramState state = (ExponentialHistogramState) testInstrumentsState; + + for (int j = 0; j < attributesList.size(); j++) { + Attributes attributes = attributesList.get(j); + for (int i = 0; i < measurementsPerAttributeSet; i++) { + state.doubleHistogram.record(random.nextInt(10_000), attributes); + } + } + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/LongLastValueTester.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/LongLastValueTester.java new file mode 100644 index 00000000000..c766f74e290 --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/LongLastValueTester.java @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state.tester; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType; +import java.util.List; +import java.util.Random; + +public class LongLastValueTester implements TestInstrumentType.InstrumentTester { + + @Override + public Aggregation testedAggregation() { + return Aggregation.lastValue(); + } + + @SuppressWarnings({"ForLoopReplaceableByForEach", "resource"}) + @Override + public 
TestInstrumentType.TestInstrumentsState buildInstruments( + double instrumentCount, + SdkMeterProvider sdkMeterProvider, + List attributesList, + Random random) { + Meter meter = sdkMeterProvider.meterBuilder("meter").build(); + meter + .gaugeBuilder("test.long.last.value") + .ofLongs() + .buildWithCallback( + observableLongMeasurement -> { + for (int j = 0; j < attributesList.size(); j++) { + observableLongMeasurement.record(1, attributesList.get(j)); + } + }); + + return new TestInstrumentType.EmptyInstrumentsState(); + } + + @Override + public void recordValuesInInstruments( + TestInstrumentType.TestInstrumentsState testInstrumentsState, + List attributesList, + Random random) { + // Recording is done by the callback define above + } +} diff --git a/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/LongSumTester.java b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/LongSumTester.java new file mode 100644 index 00000000000..2477020633e --- /dev/null +++ b/sdk/metrics/src/jmhBasedTest/java/io/opentelemetry/sdk/metrics/internal/state/tester/LongSumTester.java @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state.tester; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType; +import java.util.List; +import java.util.Random; + +public class LongSumTester implements TestInstrumentType.InstrumentTester { + private static final int measurementsPerAttributeSet = 1_000; + + static class LongSumState implements TestInstrumentType.TestInstrumentsState { + LongCounter longCounter; + } + + @Override + public Aggregation testedAggregation() { + return Aggregation.sum(); + } + + @Override + public TestInstrumentType.TestInstrumentsState buildInstruments( + double instrumentCount, + SdkMeterProvider sdkMeterProvider, + List attributesList, + Random random) { + LongSumState longSumState = new LongSumState(); + + Meter meter = sdkMeterProvider.meterBuilder("meter").build(); + longSumState.longCounter = meter.counterBuilder("test.long.sum").build(); + + return longSumState; + } + + @SuppressWarnings("ForLoopReplaceableByForEach") // This is for GC sensitivity testing: no streams + @Override + public void recordValuesInInstruments( + TestInstrumentType.TestInstrumentsState testInstrumentsState, + List attributesList, + Random random) { + LongSumState state = (LongSumState) testInstrumentsState; + + for (int j = 0; j < attributesList.size(); j++) { + Attributes attributes = attributesList.get(j); + for (int i = 0; i < measurementsPerAttributeSet; i++) { + state.longCounter.add(1, attributes); + } + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/AbstractInstrumentBuilder.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/AbstractInstrumentBuilder.java deleted file mode 100644 index db6a552bad2..00000000000 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/AbstractInstrumentBuilder.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics; - -import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; -import 
io.opentelemetry.api.metrics.ObservableLongMeasurement; -import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; -import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.state.CallbackRegistration; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; -import io.opentelemetry.sdk.metrics.internal.state.SdkObservableMeasurement; -import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; -import java.util.Collections; -import java.util.function.BiFunction; -import java.util.function.Consumer; - -/** Helper to make implementing builders easier. */ -abstract class AbstractInstrumentBuilder> { - - static final String DEFAULT_UNIT = ""; - - private final MeterProviderSharedState meterProviderSharedState; - private final InstrumentType type; - private final InstrumentValueType valueType; - private String description; - private String unit; - - protected final MeterSharedState meterSharedState; - protected final String instrumentName; - protected final Advice.AdviceBuilder adviceBuilder; - - AbstractInstrumentBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState meterSharedState, - InstrumentType type, - InstrumentValueType valueType, - String name, - String description, - String unit) { - this( - meterProviderSharedState, - meterSharedState, - type, - valueType, - name, - description, - unit, - Advice.builder()); - } - - AbstractInstrumentBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState meterSharedState, - InstrumentType type, - InstrumentValueType valueType, - String name, - String description, - String unit, - Advice.AdviceBuilder adviceBuilder) { - this.type = type; - this.valueType = valueType; - this.instrumentName = name; - this.description = description; - this.unit = unit; - this.meterProviderSharedState = meterProviderSharedState; - this.meterSharedState = meterSharedState; - this.adviceBuilder = adviceBuilder; - } - - protected abstract BuilderT getThis(); - - public BuilderT setUnit(String unit) { - this.unit = unit; - return getThis(); - } - - public BuilderT setDescription(String description) { - this.description = description; - return getThis(); - } - - protected T swapBuilder(SwapBuilder swapper) { - return swapper.newBuilder( - meterProviderSharedState, - meterSharedState, - instrumentName, - description, - unit, - adviceBuilder); - } - - final I buildSynchronousInstrument( - BiFunction instrumentFactory) { - InstrumentDescriptor descriptor = - InstrumentDescriptor.create( - instrumentName, description, unit, type, valueType, adviceBuilder.build()); - WriteableMetricStorage storage = - meterSharedState.registerSynchronousMetricStorage(descriptor, meterProviderSharedState); - return instrumentFactory.apply(descriptor, storage); - } - - final SdkObservableInstrument registerDoubleAsynchronousInstrument( - InstrumentType type, Consumer updater) { - SdkObservableMeasurement sdkObservableMeasurement = buildObservableMeasurement(type); - Runnable runnable = () -> updater.accept(sdkObservableMeasurement); - CallbackRegistration callbackRegistration = - CallbackRegistration.create(Collections.singletonList(sdkObservableMeasurement), runnable); - meterSharedState.registerCallback(callbackRegistration); - return new SdkObservableInstrument(meterSharedState, callbackRegistration); - } - - final SdkObservableInstrument 
registerLongAsynchronousInstrument( - InstrumentType type, Consumer updater) { - SdkObservableMeasurement sdkObservableMeasurement = buildObservableMeasurement(type); - Runnable runnable = () -> updater.accept(sdkObservableMeasurement); - CallbackRegistration callbackRegistration = - CallbackRegistration.create(Collections.singletonList(sdkObservableMeasurement), runnable); - meterSharedState.registerCallback(callbackRegistration); - return new SdkObservableInstrument(meterSharedState, callbackRegistration); - } - - final SdkObservableMeasurement buildObservableMeasurement(InstrumentType type) { - InstrumentDescriptor descriptor = - InstrumentDescriptor.create( - instrumentName, description, unit, type, valueType, adviceBuilder.build()); - return meterSharedState.registerObservableMeasurement(descriptor); - } - - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{descriptor=" - + InstrumentDescriptor.create( - instrumentName, description, unit, type, valueType, adviceBuilder.build()) - + "}"; - } - - @FunctionalInterface - protected interface SwapBuilder { - T newBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState meterSharedState, - String name, - String description, - String unit, - Advice.AdviceBuilder adviceBuilder); - } -} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleCounter.java new file mode 100644 index 00000000000..050429f8f52 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleCounter.java @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleCounterBuilder; +import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import java.util.List; + +final class ExtendedSdkDoubleCounter extends SdkDoubleCounter implements ExtendedDoubleCounter { + + private ExtendedSdkDoubleCounter( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { + super(descriptor, sdkMeter, storage); + } + + @Override + public boolean isEnabled() { + return sdkMeter.isMeterEnabled() && storage.isEnabled(); + } + + static final class ExtendedSdkDoubleCounterBuilder extends SdkDoubleCounterBuilder + implements ExtendedDoubleCounterBuilder { + + ExtendedSdkDoubleCounterBuilder( + SdkMeter sdkMeter, + String name, + String description, + String unit, + Advice.AdviceBuilder adviceBuilder) { + super(sdkMeter, name, description, unit, adviceBuilder); + } + + @Override + public ExtendedSdkDoubleCounter build() { + return builder.buildSynchronousInstrument(ExtendedSdkDoubleCounter::new); + } + + @Override + public ExtendedDoubleCounterBuilder setAttributesAdvice(List> attributes) { + builder.setAdviceAttributes(attributes); + return this; + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleGauge.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleGauge.java new file mode 100644 index 00000000000..def79d8bb65 --- /dev/null +++ 
b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleGauge.java @@ -0,0 +1,50 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleGauge; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleGaugeBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedLongGaugeBuilder; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import java.util.List; + +final class ExtendedSdkDoubleGauge extends SdkDoubleGauge implements ExtendedDoubleGauge { + + private ExtendedSdkDoubleGauge( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { + super(descriptor, sdkMeter, storage); + } + + @Override + public boolean isEnabled() { + return sdkMeter.isMeterEnabled() && storage.isEnabled(); + } + + static final class ExtendedSdkDoubleGaugeBuilder extends SdkDoubleGaugeBuilder + implements ExtendedDoubleGaugeBuilder { + ExtendedSdkDoubleGaugeBuilder(SdkMeter sdkMeter, String name) { + super(sdkMeter, name); + } + + @Override + public ExtendedSdkDoubleGauge build() { + return builder.buildSynchronousInstrument(ExtendedSdkDoubleGauge::new); + } + + @Override + public ExtendedDoubleGaugeBuilder setAttributesAdvice(List> attributes) { + builder.setAdviceAttributes(attributes); + return this; + } + + @Override + public ExtendedLongGaugeBuilder ofLongs() { + return builder.swapBuilder(ExtendedSdkLongGauge.ExtendedSdkLongGaugeBuilder::new); + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleHistogram.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleHistogram.java new file mode 100644 index 00000000000..76afb84214f --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleHistogram.java @@ -0,0 +1,52 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleHistogram; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleHistogramBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedLongHistogramBuilder; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import java.util.List; + +final class ExtendedSdkDoubleHistogram extends SdkDoubleHistogram + implements ExtendedDoubleHistogram { + + ExtendedSdkDoubleHistogram( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { + super(descriptor, sdkMeter, storage); + } + + @Override + public boolean isEnabled() { + return sdkMeter.isMeterEnabled() && storage.isEnabled(); + } + + static final class ExtendedSdkDoubleHistogramBuilder extends SdkDoubleHistogramBuilder + implements ExtendedDoubleHistogramBuilder { + + ExtendedSdkDoubleHistogramBuilder(SdkMeter sdkMeter, String name) { + super(sdkMeter, name); + } + + @Override + public ExtendedSdkDoubleHistogram build() { + return builder.buildSynchronousInstrument(ExtendedSdkDoubleHistogram::new); + } + + @Override + public ExtendedLongHistogramBuilder ofLongs() { + return 
builder.swapBuilder(ExtendedSdkLongHistogram.ExtendedSdkLongHistogramBuilder::new); + } + + @Override + public ExtendedDoubleHistogramBuilder setAttributesAdvice(List> attributes) { + builder.setAdviceAttributes(attributes); + return this; + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleUpDownCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleUpDownCounter.java new file mode 100644 index 00000000000..3e7ae4bdb11 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkDoubleUpDownCounter.java @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleUpDownCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleUpDownCounterBuilder; +import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import java.util.List; + +final class ExtendedSdkDoubleUpDownCounter extends SdkDoubleUpDownCounter + implements ExtendedDoubleUpDownCounter { + + private ExtendedSdkDoubleUpDownCounter( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { + super(descriptor, sdkMeter, storage); + } + + @Override + public boolean isEnabled() { + return sdkMeter.isMeterEnabled() && storage.isEnabled(); + } + + static final class ExtendedSdkDoubleUpDownCounterBuilder extends SdkDoubleUpDownCounterBuilder + implements ExtendedDoubleUpDownCounterBuilder { + + ExtendedSdkDoubleUpDownCounterBuilder( + SdkMeter sdkMeter, + String name, + String description, + String unit, + Advice.AdviceBuilder adviceBuilder) { + super(sdkMeter, name, description, unit, adviceBuilder); + } + + @Override + public ExtendedDoubleUpDownCounter build() { + return builder.buildSynchronousInstrument(ExtendedSdkDoubleUpDownCounter::new); + } + + @Override + public ExtendedDoubleUpDownCounterBuilder setAttributesAdvice( + List> attributes) { + builder.setAdviceAttributes(attributes); + return this; + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongCounter.java new file mode 100644 index 00000000000..f87e3407184 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongCounter.java @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleCounterBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedLongCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedLongCounterBuilder; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import java.util.List; + +final class ExtendedSdkLongCounter extends SdkLongCounter implements ExtendedLongCounter { + + private ExtendedSdkLongCounter( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { + super(descriptor, sdkMeter, storage); + } + + @Override + public boolean isEnabled() { + return 
sdkMeter.isMeterEnabled() && storage.isEnabled(); + } + + static final class ExtendedSdkLongCounterBuilder extends SdkLongCounterBuilder + implements ExtendedLongCounterBuilder { + + ExtendedSdkLongCounterBuilder(SdkMeter sdkMeter, String name) { + super(sdkMeter, name); + } + + @Override + public ExtendedSdkLongCounter build() { + return builder.buildSynchronousInstrument(ExtendedSdkLongCounter::new); + } + + @Override + public ExtendedDoubleCounterBuilder ofDoubles() { + return builder.swapBuilder(ExtendedSdkDoubleCounter.ExtendedSdkDoubleCounterBuilder::new); + } + + @Override + public ExtendedLongCounterBuilder setAttributesAdvice(List> attributes) { + builder.setAdviceAttributes(attributes); + return this; + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongGauge.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongGauge.java new file mode 100644 index 00000000000..94845f4bef5 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongGauge.java @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.incubator.metrics.ExtendedLongGauge; +import io.opentelemetry.api.incubator.metrics.ExtendedLongGaugeBuilder; +import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import java.util.List; + +final class ExtendedSdkLongGauge extends SdkLongGauge implements ExtendedLongGauge { + + private ExtendedSdkLongGauge( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { + super(descriptor, sdkMeter, storage); + } + + @Override + public boolean isEnabled() { + return sdkMeter.isMeterEnabled() && storage.isEnabled(); + } + + static final class ExtendedSdkLongGaugeBuilder extends SdkLongGaugeBuilder + implements ExtendedLongGaugeBuilder { + + ExtendedSdkLongGaugeBuilder( + SdkMeter sdkMeter, + String name, + String description, + String unit, + Advice.AdviceBuilder adviceBuilder) { + super(sdkMeter, name, description, unit, adviceBuilder); + } + + @Override + public ExtendedSdkLongGauge build() { + return builder.buildSynchronousInstrument(ExtendedSdkLongGauge::new); + } + + @Override + public ExtendedLongGaugeBuilder setAttributesAdvice(List> attributes) { + builder.setAdviceAttributes(attributes); + return this; + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongHistogram.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongHistogram.java new file mode 100644 index 00000000000..e10851efd54 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongHistogram.java @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.incubator.metrics.ExtendedLongHistogram; +import io.opentelemetry.api.incubator.metrics.ExtendedLongHistogramBuilder; +import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import java.util.List; + 
+final class ExtendedSdkLongHistogram extends SdkLongHistogram implements ExtendedLongHistogram { + + private ExtendedSdkLongHistogram( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { + super(descriptor, sdkMeter, storage); + } + + @Override + public boolean isEnabled() { + return sdkMeter.isMeterEnabled() && storage.isEnabled(); + } + + static final class ExtendedSdkLongHistogramBuilder extends SdkLongHistogramBuilder + implements ExtendedLongHistogramBuilder { + + ExtendedSdkLongHistogramBuilder( + SdkMeter sdkMeter, + String name, + String description, + String unit, + Advice.AdviceBuilder adviceBuilder) { + super(sdkMeter, name, description, unit, adviceBuilder); + } + + @Override + public ExtendedSdkLongHistogram build() { + return builder.buildSynchronousInstrument(ExtendedSdkLongHistogram::new); + } + + @Override + public ExtendedLongHistogramBuilder setAttributesAdvice(List> attributes) { + builder.setAdviceAttributes(attributes); + return this; + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongUpDownCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongUpDownCounter.java new file mode 100644 index 00000000000..53be08fed63 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ExtendedSdkLongUpDownCounter.java @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleUpDownCounterBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedLongUpDownCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedLongUpDownCounterBuilder; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import java.util.List; + +final class ExtendedSdkLongUpDownCounter extends SdkLongUpDownCounter + implements ExtendedLongUpDownCounter { + + private ExtendedSdkLongUpDownCounter( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { + super(descriptor, sdkMeter, storage); + } + + @Override + public boolean isEnabled() { + return sdkMeter.isMeterEnabled() && storage.isEnabled(); + } + + static final class ExtendedSdkLongUpDownCounterBuilder extends SdkLongUpDownCounterBuilder + implements ExtendedLongUpDownCounterBuilder { + + ExtendedSdkLongUpDownCounterBuilder(SdkMeter sdkMeter, String name) { + super(sdkMeter, name); + } + + @Override + public ExtendedLongUpDownCounter build() { + return builder.buildSynchronousInstrument(ExtendedSdkLongUpDownCounter::new); + } + + @Override + public ExtendedDoubleUpDownCounterBuilder ofDoubles() { + return builder.swapBuilder( + ExtendedSdkDoubleUpDownCounter.ExtendedSdkDoubleUpDownCounterBuilder::new); + } + + @Override + public ExtendedLongUpDownCounterBuilder setAttributesAdvice(List> attributes) { + builder.setAdviceAttributes(attributes); + return this; + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/IncubatingUtil.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/IncubatingUtil.java new file mode 100644 index 00000000000..03a88bb9382 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/IncubatingUtil.java @@ -0,0 +1,39 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.metrics.DoubleGaugeBuilder; +import io.opentelemetry.api.metrics.DoubleHistogramBuilder; +import io.opentelemetry.api.metrics.LongCounterBuilder; +import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; + +/** + * Utilities for interacting with {@code io.opentelemetry:opentelemetry-api-incubator}, which is not + * guaranteed to be present on the classpath. For all methods, callers MUST first separately + * reflectively confirm that the incubator is available on the classpath. + */ +final class IncubatingUtil { + + private IncubatingUtil() {} + + static LongCounterBuilder createExtendedLongCounterBuilder(SdkMeter sdkMeter, String name) { + return new ExtendedSdkLongCounter.ExtendedSdkLongCounterBuilder(sdkMeter, name); + } + + static LongUpDownCounterBuilder createExtendedLongUpDownCounterBuilder( + SdkMeter sdkMeter, String name) { + return new ExtendedSdkLongUpDownCounter.ExtendedSdkLongUpDownCounterBuilder(sdkMeter, name); + } + + static DoubleHistogramBuilder createExtendedDoubleHistogramBuilder( + SdkMeter sdkMeter, String name) { + return new ExtendedSdkDoubleHistogram.ExtendedSdkDoubleHistogramBuilder(sdkMeter, name); + } + + static DoubleGaugeBuilder createExtendedDoubleGaugeBuilder(SdkMeter sdkMeter, String name) { + return new ExtendedSdkDoubleGauge.ExtendedSdkDoubleGaugeBuilder(sdkMeter, name); + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/InstrumentBuilder.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/InstrumentBuilder.java new file mode 100644 index 00000000000..14fd54d7bd1 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/InstrumentBuilder.java @@ -0,0 +1,131 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; +import io.opentelemetry.api.metrics.ObservableLongMeasurement; +import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.state.CallbackRegistration; +import io.opentelemetry.sdk.metrics.internal.state.SdkObservableMeasurement; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +/** Helper to make implementing builders easier. 
*/ +final class InstrumentBuilder { + + private final String name; + private final SdkMeter sdkMeter; + private final InstrumentValueType valueType; + private InstrumentType type; + private Advice.AdviceBuilder adviceBuilder = Advice.builder(); + private String description = ""; + private String unit = ""; + + InstrumentBuilder( + String name, InstrumentType type, InstrumentValueType valueType, SdkMeter sdkMeter) { + this.name = name; + this.type = type; + this.valueType = valueType; + this.sdkMeter = sdkMeter; + } + + InstrumentBuilder setUnit(String unit) { + this.unit = unit; + return this; + } + + InstrumentBuilder setAdviceBuilder(Advice.AdviceBuilder adviceBuilder) { + this.adviceBuilder = adviceBuilder; + return this; + } + + InstrumentBuilder setDescription(String description) { + this.description = description; + return this; + } + + T swapBuilder(SwapBuilder swapper) { + return swapper.newBuilder(sdkMeter, name, description, unit, adviceBuilder); + } + + @FunctionalInterface + interface SynchronousInstrumentConstructor { + + I createInstrument( + InstrumentDescriptor instrumentDescriptor, + SdkMeter sdkMeter, + WriteableMetricStorage storage); + } + + I buildSynchronousInstrument( + SynchronousInstrumentConstructor instrumentFactory) { + InstrumentDescriptor descriptor = newDescriptor(); + WriteableMetricStorage storage = sdkMeter.registerSynchronousMetricStorage(descriptor); + return instrumentFactory.createInstrument(descriptor, sdkMeter, storage); + } + + SdkObservableInstrument buildDoubleAsynchronousInstrument( + InstrumentType type, Consumer updater) { + SdkObservableMeasurement sdkObservableMeasurement = buildObservableMeasurement(type); + Runnable runnable = () -> updater.accept(sdkObservableMeasurement); + CallbackRegistration callbackRegistration = + CallbackRegistration.create(Collections.singletonList(sdkObservableMeasurement), runnable); + sdkMeter.registerCallback(callbackRegistration); + return new SdkObservableInstrument(sdkMeter, callbackRegistration); + } + + SdkObservableInstrument buildLongAsynchronousInstrument( + InstrumentType type, Consumer updater) { + SdkObservableMeasurement sdkObservableMeasurement = buildObservableMeasurement(type); + Runnable runnable = () -> updater.accept(sdkObservableMeasurement); + CallbackRegistration callbackRegistration = + CallbackRegistration.create(Collections.singletonList(sdkObservableMeasurement), runnable); + sdkMeter.registerCallback(callbackRegistration); + return new SdkObservableInstrument(sdkMeter, callbackRegistration); + } + + SdkObservableMeasurement buildObservableMeasurement(InstrumentType type) { + this.type = type; + InstrumentDescriptor descriptor = newDescriptor(); + return sdkMeter.registerObservableMeasurement(descriptor); + } + + private InstrumentDescriptor newDescriptor() { + return InstrumentDescriptor.create( + name, description, unit, type, valueType, adviceBuilder.build()); + } + + @Override + public String toString() { + return toStringHelper(getClass().getSimpleName()); + } + + String toStringHelper(String className) { + return className + "{descriptor=" + newDescriptor() + "}"; + } + + @FunctionalInterface + interface SwapBuilder { + T newBuilder( + SdkMeter sdkMeter, + String name, + String description, + String unit, + Advice.AdviceBuilder adviceBuilder); + } + + void setAdviceAttributes(List> attributes) { + adviceBuilder.setAttributes(attributes); + } + + void setExplicitBucketBoundaries(List bucketBoundaries) { + adviceBuilder.setExplicitBucketBoundaries(bucketBoundaries); + } +} diff 
--git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/InstrumentType.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/InstrumentType.java index d2218ec8773..6b87ce20c9c 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/InstrumentType.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/InstrumentType.java @@ -10,6 +10,7 @@ * * @since 1.14.0 */ +@SuppressWarnings({"MissingSummary", "SummaryJavadoc"}) public enum InstrumentType { COUNTER, UP_DOWN_COUNTER, @@ -17,4 +18,8 @@ public enum InstrumentType { OBSERVABLE_COUNTER, OBSERVABLE_UP_DOWN_COUNTER, OBSERVABLE_GAUGE, + /** + * @since 1.38.0 + */ + GAUGE, } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleCounter.java index 04372eeb886..7fd6e435556 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleCounter.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleCounter.java @@ -5,34 +5,31 @@ package io.opentelemetry.sdk.metrics; -import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.DoubleCounter; import io.opentelemetry.api.metrics.DoubleCounterBuilder; import io.opentelemetry.api.metrics.ObservableDoubleCounter; import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; import io.opentelemetry.context.Context; -import io.opentelemetry.extension.incubator.metrics.DoubleCounterAdviceConfigurer; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleCounterBuilder; import io.opentelemetry.sdk.internal.ThrottlingLogger; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; -import java.util.List; import java.util.function.Consumer; import java.util.logging.Level; import java.util.logging.Logger; -final class SdkDoubleCounter extends AbstractInstrument implements DoubleCounter { +class SdkDoubleCounter extends AbstractInstrument implements DoubleCounter { private static final Logger logger = Logger.getLogger(SdkDoubleCounter.class.getName()); private final ThrottlingLogger throttlingLogger = new ThrottlingLogger(logger); - private final WriteableMetricStorage storage; + final SdkMeter sdkMeter; + final WriteableMetricStorage storage; - private SdkDoubleCounter(InstrumentDescriptor descriptor, WriteableMetricStorage storage) { + SdkDoubleCounter( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { super(descriptor); + this.sdkMeter = sdkMeter; this.storage = storage; } @@ -59,59 +56,54 @@ public void add(double increment) { add(increment, Attributes.empty()); } - static final class SdkDoubleCounterBuilder - extends AbstractInstrumentBuilder - implements ExtendedDoubleCounterBuilder, DoubleCounterAdviceConfigurer { + static class SdkDoubleCounterBuilder implements DoubleCounterBuilder { + + final InstrumentBuilder builder; SdkDoubleCounterBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState sharedState, + SdkMeter sdkMeter, String name, String description, String unit, Advice.AdviceBuilder adviceBuilder) { - super( - meterProviderSharedState, - sharedState, - InstrumentType.COUNTER, - 
InstrumentValueType.DOUBLE, - name, - description, - unit, - adviceBuilder); + this.builder = + new InstrumentBuilder(name, InstrumentType.COUNTER, InstrumentValueType.DOUBLE, sdkMeter) + .setUnit(unit) + .setDescription(description) + .setAdviceBuilder(adviceBuilder); } @Override - protected SdkDoubleCounterBuilder getThis() { - return this; + public SdkDoubleCounter build() { + return builder.buildSynchronousInstrument(SdkDoubleCounter::new); } @Override - public DoubleCounterBuilder setAdvice(Consumer adviceConsumer) { - adviceConsumer.accept(this); + public DoubleCounterBuilder setDescription(String description) { + builder.setDescription(description); return this; } @Override - public SdkDoubleCounter build() { - return buildSynchronousInstrument(SdkDoubleCounter::new); + public DoubleCounterBuilder setUnit(String unit) { + builder.setUnit(unit); + return this; } @Override public ObservableDoubleCounter buildWithCallback( Consumer callback) { - return registerDoubleAsynchronousInstrument(InstrumentType.OBSERVABLE_COUNTER, callback); + return builder.buildDoubleAsynchronousInstrument(InstrumentType.OBSERVABLE_COUNTER, callback); } @Override public ObservableDoubleMeasurement buildObserver() { - return buildObservableMeasurement(InstrumentType.OBSERVABLE_COUNTER); + return builder.buildObservableMeasurement(InstrumentType.OBSERVABLE_COUNTER); } @Override - public DoubleCounterAdviceConfigurer setAttributes(List> attributes) { - adviceBuilder.setAttributes(attributes); - return this; + public String toString() { + return builder.toStringHelper(getClass().getSimpleName()); } } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleGauge.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleGauge.java index a7456008da4..97076603bb5 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleGauge.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleGauge.java @@ -5,35 +5,37 @@ package io.opentelemetry.sdk.metrics; -import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleGauge; import io.opentelemetry.api.metrics.DoubleGaugeBuilder; import io.opentelemetry.api.metrics.LongGaugeBuilder; import io.opentelemetry.api.metrics.ObservableDoubleGauge; import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; import io.opentelemetry.context.Context; -import io.opentelemetry.extension.incubator.metrics.DoubleGauge; -import io.opentelemetry.extension.incubator.metrics.DoubleGaugeAdviceConfigurer; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleGaugeBuilder; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; -import java.util.List; import java.util.function.Consumer; -final class SdkDoubleGauge extends AbstractInstrument implements DoubleGauge { +class SdkDoubleGauge extends AbstractInstrument implements DoubleGauge { - private final WriteableMetricStorage storage; + final SdkMeter sdkMeter; + final WriteableMetricStorage storage; - private SdkDoubleGauge(InstrumentDescriptor descriptor, WriteableMetricStorage storage) { + SdkDoubleGauge( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { super(descriptor); + this.sdkMeter = sdkMeter; 
this.storage = storage; } @Override - public void set(double increment, Attributes attributes) { - storage.recordDouble(increment, attributes, Context.root()); + public void set(double value, Attributes attributes) { + storage.recordDouble(value, attributes, Context.current()); + } + + @Override + public void set(double value, Attributes attributes, Context context) { + storage.recordDouble(value, attributes, context); } @Override @@ -41,61 +43,49 @@ public void set(double increment) { set(increment, Attributes.empty()); } - static final class SdkDoubleGaugeBuilder extends AbstractInstrumentBuilder - implements ExtendedDoubleGaugeBuilder, DoubleGaugeAdviceConfigurer { - - SdkDoubleGaugeBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState meterSharedState, - String name) { - super( - meterProviderSharedState, - meterSharedState, - // TODO: use InstrumentType.GAUGE when available - InstrumentType.OBSERVABLE_GAUGE, - InstrumentValueType.DOUBLE, - name, - "", - DEFAULT_UNIT); - } + static class SdkDoubleGaugeBuilder implements DoubleGaugeBuilder { + final InstrumentBuilder builder; - @Override - protected SdkDoubleGaugeBuilder getThis() { - return this; + SdkDoubleGaugeBuilder(SdkMeter sdkMeter, String name) { + builder = + new InstrumentBuilder(name, InstrumentType.GAUGE, InstrumentValueType.DOUBLE, sdkMeter); } @Override - public SdkDoubleGauge build() { - return buildSynchronousInstrument(SdkDoubleGauge::new); + public DoubleGaugeBuilder setDescription(String description) { + builder.setDescription(description); + return this; } @Override - public DoubleGaugeBuilder setAdvice(Consumer adviceConsumer) { - adviceConsumer.accept(this); + public DoubleGaugeBuilder setUnit(String unit) { + builder.setUnit(unit); return this; } @Override - public DoubleGaugeAdviceConfigurer setAttributes(List> attributes) { - adviceBuilder.setAttributes(attributes); - return this; + public SdkDoubleGauge build() { + return builder.buildSynchronousInstrument(SdkDoubleGauge::new); } @Override public LongGaugeBuilder ofLongs() { - return swapBuilder(SdkLongGauge.SdkLongGaugeBuilder::new); + return builder.swapBuilder(SdkLongGauge.SdkLongGaugeBuilder::new); } @Override public ObservableDoubleGauge buildWithCallback(Consumer callback) { - // TODO: use InstrumentType.GAUGE when available - return registerDoubleAsynchronousInstrument(InstrumentType.OBSERVABLE_GAUGE, callback); + return builder.buildDoubleAsynchronousInstrument(InstrumentType.OBSERVABLE_GAUGE, callback); } @Override public ObservableDoubleMeasurement buildObserver() { - // TODO: use InstrumentType.GAUGE when available - return buildObservableMeasurement(InstrumentType.OBSERVABLE_GAUGE); + return builder.buildObservableMeasurement(InstrumentType.OBSERVABLE_GAUGE); + } + + @Override + public String toString() { + return builder.toStringHelper(getClass().getSimpleName()); } } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogram.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogram.java index e9b671a3d6e..de4473ace2e 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogram.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogram.java @@ -5,31 +5,31 @@ package io.opentelemetry.sdk.metrics; -import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.DoubleHistogramBuilder; import 
io.opentelemetry.api.metrics.LongHistogramBuilder; import io.opentelemetry.context.Context; -import io.opentelemetry.extension.incubator.metrics.DoubleHistogramAdviceConfigurer; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleHistogramBuilder; import io.opentelemetry.sdk.internal.ThrottlingLogger; +import io.opentelemetry.sdk.metrics.internal.aggregator.ExplicitBucketHistogramUtils; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; import java.util.List; -import java.util.function.Consumer; +import java.util.Objects; import java.util.logging.Level; import java.util.logging.Logger; -final class SdkDoubleHistogram extends AbstractInstrument implements DoubleHistogram { +class SdkDoubleHistogram extends AbstractInstrument implements DoubleHistogram { private static final Logger logger = Logger.getLogger(SdkDoubleHistogram.class.getName()); private final ThrottlingLogger throttlingLogger = new ThrottlingLogger(logger); - private final WriteableMetricStorage storage; + final SdkMeter sdkMeter; + final WriteableMetricStorage storage; - private SdkDoubleHistogram(InstrumentDescriptor descriptor, WriteableMetricStorage storage) { + SdkDoubleHistogram( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { super(descriptor); + this.sdkMeter = sdkMeter; this.storage = storage; } @@ -56,57 +56,54 @@ public void record(double value) { record(value, Attributes.empty()); } - static final class SdkDoubleHistogramBuilder - extends AbstractInstrumentBuilder - implements ExtendedDoubleHistogramBuilder, DoubleHistogramAdviceConfigurer { - - SdkDoubleHistogramBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState meterSharedState, - String name) { - super( - meterProviderSharedState, - meterSharedState, - InstrumentType.HISTOGRAM, - InstrumentValueType.DOUBLE, - name, - "", - DEFAULT_UNIT); + static class SdkDoubleHistogramBuilder implements DoubleHistogramBuilder { + + final InstrumentBuilder builder; + + SdkDoubleHistogramBuilder(SdkMeter sdkMeter, String name) { + builder = + new InstrumentBuilder( + name, InstrumentType.HISTOGRAM, InstrumentValueType.DOUBLE, sdkMeter); } @Override - protected SdkDoubleHistogramBuilder getThis() { + public DoubleHistogramBuilder setDescription(String description) { + builder.setDescription(description); return this; } @Override - public SdkDoubleHistogramBuilder setAdvice( - Consumer adviceConsumer) { - adviceConsumer.accept(this); + public DoubleHistogramBuilder setUnit(String unit) { + builder.setUnit(unit); return this; } @Override public SdkDoubleHistogram build() { - return buildSynchronousInstrument(SdkDoubleHistogram::new); + return builder.buildSynchronousInstrument(SdkDoubleHistogram::new); } @Override public LongHistogramBuilder ofLongs() { - return swapBuilder(SdkLongHistogram.SdkLongHistogramBuilder::new); + return builder.swapBuilder(SdkLongHistogram.SdkLongHistogramBuilder::new); } @Override - public DoubleHistogramAdviceConfigurer setExplicitBucketBoundaries( - List bucketBoundaries) { - adviceBuilder.setExplicitBucketBoundaries(bucketBoundaries); + public DoubleHistogramBuilder setExplicitBucketBoundariesAdvice(List bucketBoundaries) { + try { + Objects.requireNonNull(bucketBoundaries, "bucketBoundaries must not be null"); + 
ExplicitBucketHistogramUtils.validateBucketBoundaries(bucketBoundaries); + } catch (IllegalArgumentException | NullPointerException e) { + logger.warning("Error setting explicit bucket boundaries advice: " + e.getMessage()); + return this; + } + builder.setExplicitBucketBoundaries(bucketBoundaries); return this; } @Override - public DoubleHistogramAdviceConfigurer setAttributes(List> attributes) { - adviceBuilder.setAttributes(attributes); - return this; + public String toString() { + return builder.toStringHelper(getClass().getSimpleName()); } } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounter.java index 22a6b68b6e3..4231f58fdc0 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounter.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounter.java @@ -5,29 +5,26 @@ package io.opentelemetry.sdk.metrics; -import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.DoubleUpDownCounter; import io.opentelemetry.api.metrics.DoubleUpDownCounterBuilder; import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; import io.opentelemetry.api.metrics.ObservableDoubleUpDownCounter; import io.opentelemetry.context.Context; -import io.opentelemetry.extension.incubator.metrics.DoubleUpDownCounterAdviceConfigurer; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleUpDownCounterBuilder; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; -import java.util.List; import java.util.function.Consumer; -final class SdkDoubleUpDownCounter extends AbstractInstrument implements DoubleUpDownCounter { +class SdkDoubleUpDownCounter extends AbstractInstrument implements DoubleUpDownCounter { - private final WriteableMetricStorage storage; + final SdkMeter sdkMeter; + final WriteableMetricStorage storage; - private SdkDoubleUpDownCounter(InstrumentDescriptor descriptor, WriteableMetricStorage storage) { + SdkDoubleUpDownCounter( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { super(descriptor); + this.sdkMeter = sdkMeter; this.storage = storage; } @@ -46,61 +43,56 @@ public void add(double increment) { add(increment, Attributes.empty()); } - static final class SdkDoubleUpDownCounterBuilder - extends AbstractInstrumentBuilder - implements ExtendedDoubleUpDownCounterBuilder, DoubleUpDownCounterAdviceConfigurer { + static class SdkDoubleUpDownCounterBuilder implements DoubleUpDownCounterBuilder { + + final InstrumentBuilder builder; SdkDoubleUpDownCounterBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState sharedState, + SdkMeter sdkMeter, String name, String description, String unit, Advice.AdviceBuilder adviceBuilder) { - super( - meterProviderSharedState, - sharedState, - InstrumentType.UP_DOWN_COUNTER, - InstrumentValueType.DOUBLE, - name, - description, - unit, - adviceBuilder); + this.builder = + new InstrumentBuilder( + name, InstrumentType.UP_DOWN_COUNTER, InstrumentValueType.DOUBLE, sdkMeter) + .setDescription(description) + .setUnit(unit) + 
.setAdviceBuilder(adviceBuilder); } @Override - protected SdkDoubleUpDownCounterBuilder getThis() { + public DoubleUpDownCounterBuilder setDescription(String description) { + builder.setDescription(description); return this; } @Override - public DoubleUpDownCounterBuilder setAdvice( - Consumer adviceConsumer) { - adviceConsumer.accept(this); + public DoubleUpDownCounterBuilder setUnit(String unit) { + builder.setUnit(unit); return this; } @Override public DoubleUpDownCounter build() { - return buildSynchronousInstrument(SdkDoubleUpDownCounter::new); + return builder.buildSynchronousInstrument(SdkDoubleUpDownCounter::new); } @Override public ObservableDoubleUpDownCounter buildWithCallback( Consumer callback) { - return registerDoubleAsynchronousInstrument( + return builder.buildDoubleAsynchronousInstrument( InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, callback); } @Override public ObservableDoubleMeasurement buildObserver() { - return buildObservableMeasurement(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER); + return builder.buildObservableMeasurement(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER); } @Override - public DoubleUpDownCounterAdviceConfigurer setAttributes(List> attributes) { - adviceBuilder.setAttributes(attributes); - return this; + public String toString() { + return builder.toStringHelper(getClass().getSimpleName()); } } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongCounter.java index dcdca53858a..0de901060f5 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongCounter.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongCounter.java @@ -5,7 +5,6 @@ package io.opentelemetry.sdk.metrics; -import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.DoubleCounterBuilder; import io.opentelemetry.api.metrics.LongCounter; @@ -13,27 +12,25 @@ import io.opentelemetry.api.metrics.ObservableLongCounter; import io.opentelemetry.api.metrics.ObservableLongMeasurement; import io.opentelemetry.context.Context; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongCounterBuilder; -import io.opentelemetry.extension.incubator.metrics.LongCounterAdviceConfigurer; import io.opentelemetry.sdk.internal.ThrottlingLogger; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; -import java.util.List; import java.util.function.Consumer; import java.util.logging.Level; import java.util.logging.Logger; -final class SdkLongCounter extends AbstractInstrument implements LongCounter { +class SdkLongCounter extends AbstractInstrument implements LongCounter { private static final Logger logger = Logger.getLogger(SdkLongCounter.class.getName()); private final ThrottlingLogger throttlingLogger = new ThrottlingLogger(logger); - private final WriteableMetricStorage storage; + final SdkMeter sdkMeter; + final WriteableMetricStorage storage; - private SdkLongCounter(InstrumentDescriptor descriptor, WriteableMetricStorage storage) { + SdkLongCounter( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { super(descriptor); + this.sdkMeter = sdkMeter; this.storage = storage; } @@ -60,58 +57,50 @@ public void add(long 
increment) { add(increment, Attributes.empty()); } - static final class SdkLongCounterBuilder extends AbstractInstrumentBuilder - implements ExtendedLongCounterBuilder, LongCounterAdviceConfigurer { - - SdkLongCounterBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState meterSharedState, - String name) { - super( - meterProviderSharedState, - meterSharedState, - InstrumentType.COUNTER, - InstrumentValueType.LONG, - name, - "", - DEFAULT_UNIT); + static class SdkLongCounterBuilder implements LongCounterBuilder { + + final InstrumentBuilder builder; + + SdkLongCounterBuilder(SdkMeter sdkMeter, String name) { + this.builder = + new InstrumentBuilder(name, InstrumentType.COUNTER, InstrumentValueType.LONG, sdkMeter); } @Override - protected SdkLongCounterBuilder getThis() { + public LongCounterBuilder setDescription(String description) { + builder.setDescription(description); return this; } @Override - public LongCounterBuilder setAdvice(Consumer adviceConsumer) { - adviceConsumer.accept(this); + public LongCounterBuilder setUnit(String unit) { + builder.setUnit(unit); return this; } @Override public SdkLongCounter build() { - return buildSynchronousInstrument(SdkLongCounter::new); + return builder.buildSynchronousInstrument(SdkLongCounter::new); } @Override public DoubleCounterBuilder ofDoubles() { - return swapBuilder(SdkDoubleCounter.SdkDoubleCounterBuilder::new); + return builder.swapBuilder(SdkDoubleCounter.SdkDoubleCounterBuilder::new); } @Override public ObservableLongCounter buildWithCallback(Consumer callback) { - return registerLongAsynchronousInstrument(InstrumentType.OBSERVABLE_COUNTER, callback); + return builder.buildLongAsynchronousInstrument(InstrumentType.OBSERVABLE_COUNTER, callback); } @Override public ObservableLongMeasurement buildObserver() { - return buildObservableMeasurement(InstrumentType.OBSERVABLE_COUNTER); + return builder.buildObservableMeasurement(InstrumentType.OBSERVABLE_COUNTER); } @Override - public LongCounterAdviceConfigurer setAttributes(List> attributes) { - adviceBuilder.setAttributes(attributes); - return this; + public String toString() { + return builder.toStringHelper(getClass().getSimpleName()); } } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongGauge.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongGauge.java index 475f80bba1f..5c6aafbb9ef 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongGauge.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongGauge.java @@ -5,35 +5,36 @@ package io.opentelemetry.sdk.metrics; -import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongGauge; import io.opentelemetry.api.metrics.LongGaugeBuilder; import io.opentelemetry.api.metrics.ObservableLongGauge; import io.opentelemetry.api.metrics.ObservableLongMeasurement; import io.opentelemetry.context.Context; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongGaugeBuilder; -import io.opentelemetry.extension.incubator.metrics.LongGauge; -import io.opentelemetry.extension.incubator.metrics.LongGaugeAdviceConfigurer; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import 
io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; -import java.util.List; import java.util.function.Consumer; -final class SdkLongGauge extends AbstractInstrument implements LongGauge { +class SdkLongGauge extends AbstractInstrument implements LongGauge { - private final WriteableMetricStorage storage; + final SdkMeter sdkMeter; + final WriteableMetricStorage storage; - private SdkLongGauge(InstrumentDescriptor descriptor, WriteableMetricStorage storage) { + SdkLongGauge(InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { super(descriptor); + this.sdkMeter = sdkMeter; this.storage = storage; } @Override - public void set(long increment, Attributes attributes) { - storage.recordLong(increment, attributes, Context.root()); + public void set(long value, Attributes attributes) { + storage.recordLong(value, attributes, Context.current()); + } + + @Override + public void set(long value, Attributes attributes, Context context) { + storage.recordLong(value, attributes, context); } @Override @@ -41,60 +42,53 @@ public void set(long increment) { set(increment, Attributes.empty()); } - static final class SdkLongGaugeBuilder extends AbstractInstrumentBuilder - implements ExtendedLongGaugeBuilder, LongGaugeAdviceConfigurer { + static class SdkLongGaugeBuilder implements LongGaugeBuilder { + + final InstrumentBuilder builder; SdkLongGaugeBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState sharedState, + SdkMeter sdkMeter, String name, String description, String unit, Advice.AdviceBuilder adviceBuilder) { - super( - meterProviderSharedState, - sharedState, - // TODO: use InstrumentType.GAUGE when available - InstrumentType.OBSERVABLE_GAUGE, - InstrumentValueType.LONG, - name, - description, - unit, - adviceBuilder); + builder = + new InstrumentBuilder(name, InstrumentType.GAUGE, InstrumentValueType.LONG, sdkMeter) + .setDescription(description) + .setUnit(unit) + .setAdviceBuilder(adviceBuilder); } @Override - protected SdkLongGaugeBuilder getThis() { + public LongGaugeBuilder setDescription(String description) { + builder.setDescription(description); return this; } @Override - public SdkLongGauge build() { - return buildSynchronousInstrument(SdkLongGauge::new); - } - - @Override - public LongGaugeBuilder setAdvice(Consumer adviceConsumer) { - adviceConsumer.accept(this); + public LongGaugeBuilder setUnit(String unit) { + builder.setUnit(unit); return this; } @Override - public LongGaugeAdviceConfigurer setAttributes(List> attributes) { - adviceBuilder.setAttributes(attributes); - return this; + public SdkLongGauge build() { + return builder.buildSynchronousInstrument(SdkLongGauge::new); } @Override public ObservableLongGauge buildWithCallback(Consumer callback) { - // TODO: use InstrumentType.GAUGE when available - return registerLongAsynchronousInstrument(InstrumentType.OBSERVABLE_GAUGE, callback); + return builder.buildLongAsynchronousInstrument(InstrumentType.OBSERVABLE_GAUGE, callback); } @Override public ObservableLongMeasurement buildObserver() { - // TODO: use InstrumentType.GAUGE when available - return buildObservableMeasurement(InstrumentType.OBSERVABLE_GAUGE); + return builder.buildObservableMeasurement(InstrumentType.OBSERVABLE_GAUGE); + } + + @Override + public String toString() { + return builder.toStringHelper(getClass().getSimpleName()); } } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongHistogram.java 
b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongHistogram.java index 5e0d33b6578..b777ad34e59 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongHistogram.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongHistogram.java @@ -5,32 +5,32 @@ package io.opentelemetry.sdk.metrics; -import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.LongHistogramBuilder; import io.opentelemetry.context.Context; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongHistogramBuilder; -import io.opentelemetry.extension.incubator.metrics.LongHistogramAdviceConfigurer; import io.opentelemetry.sdk.internal.ThrottlingLogger; +import io.opentelemetry.sdk.metrics.internal.aggregator.ExplicitBucketHistogramUtils; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; import java.util.List; -import java.util.function.Consumer; +import java.util.Objects; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; -final class SdkLongHistogram extends AbstractInstrument implements LongHistogram { +class SdkLongHistogram extends AbstractInstrument implements LongHistogram { private static final Logger logger = Logger.getLogger(SdkLongHistogram.class.getName()); private final ThrottlingLogger throttlingLogger = new ThrottlingLogger(logger); - private final WriteableMetricStorage storage; + final SdkMeter sdkMeter; + final WriteableMetricStorage storage; - private SdkLongHistogram(InstrumentDescriptor descriptor, WriteableMetricStorage storage) { + SdkLongHistogram( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { super(descriptor); + this.sdkMeter = sdkMeter; this.storage = storage; } @@ -57,57 +57,58 @@ public void record(long value) { record(value, Attributes.empty()); } - static final class SdkLongHistogramBuilder - extends AbstractInstrumentBuilder - implements ExtendedLongHistogramBuilder, LongHistogramAdviceConfigurer { + static class SdkLongHistogramBuilder implements LongHistogramBuilder { + + final InstrumentBuilder builder; SdkLongHistogramBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState sharedState, + SdkMeter sdkMeter, String name, String description, String unit, Advice.AdviceBuilder adviceBuilder) { - super( - meterProviderSharedState, - sharedState, - InstrumentType.HISTOGRAM, - InstrumentValueType.LONG, - name, - description, - unit, - adviceBuilder); + builder = + new InstrumentBuilder(name, InstrumentType.HISTOGRAM, InstrumentValueType.LONG, sdkMeter) + .setDescription(description) + .setUnit(unit) + .setAdviceBuilder(adviceBuilder); } @Override - protected SdkLongHistogramBuilder getThis() { + public LongHistogramBuilder setDescription(String description) { + builder.setDescription(description); return this; } @Override - public SdkLongHistogramBuilder setAdvice( - Consumer adviceConsumer) { - adviceConsumer.accept(this); + public LongHistogramBuilder setUnit(String unit) { + builder.setUnit(unit); return this; } @Override public SdkLongHistogram build() { - return 
buildSynchronousInstrument(SdkLongHistogram::new); + return builder.buildSynchronousInstrument(SdkLongHistogram::new); } @Override - public LongHistogramAdviceConfigurer setExplicitBucketBoundaries(List bucketBoundaries) { - List doubleBoundaries = - bucketBoundaries.stream().map(Long::doubleValue).collect(Collectors.toList()); - adviceBuilder.setExplicitBucketBoundaries(doubleBoundaries); + public LongHistogramBuilder setExplicitBucketBoundariesAdvice(List bucketBoundaries) { + List boundaries; + try { + Objects.requireNonNull(bucketBoundaries, "bucketBoundaries must not be null"); + boundaries = bucketBoundaries.stream().map(Long::doubleValue).collect(Collectors.toList()); + ExplicitBucketHistogramUtils.validateBucketBoundaries(boundaries); + } catch (IllegalArgumentException | NullPointerException e) { + logger.warning("Error setting explicit bucket boundaries advice: " + e.getMessage()); + return this; + } + builder.setExplicitBucketBoundaries(boundaries); return this; } @Override - public LongHistogramAdviceConfigurer setAttributes(List> attributes) { - adviceBuilder.setAttributes(attributes); - return this; + public String toString() { + return builder.toStringHelper(getClass().getSimpleName()); } } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongUpDownCounter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongUpDownCounter.java index 7c167acc28f..dece771f4b6 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongUpDownCounter.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkLongUpDownCounter.java @@ -5,7 +5,6 @@ package io.opentelemetry.sdk.metrics; -import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.DoubleUpDownCounterBuilder; import io.opentelemetry.api.metrics.LongUpDownCounter; @@ -13,21 +12,19 @@ import io.opentelemetry.api.metrics.ObservableLongMeasurement; import io.opentelemetry.api.metrics.ObservableLongUpDownCounter; import io.opentelemetry.context.Context; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongUpDownCounterBuilder; -import io.opentelemetry.extension.incubator.metrics.LongUpDownCounterAdviceConfigurer; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; -import java.util.List; import java.util.function.Consumer; -final class SdkLongUpDownCounter extends AbstractInstrument implements LongUpDownCounter { +class SdkLongUpDownCounter extends AbstractInstrument implements LongUpDownCounter { - private final WriteableMetricStorage storage; + final SdkMeter sdkMeter; + final WriteableMetricStorage storage; - private SdkLongUpDownCounter(InstrumentDescriptor descriptor, WriteableMetricStorage storage) { + SdkLongUpDownCounter( + InstrumentDescriptor descriptor, SdkMeter sdkMeter, WriteableMetricStorage storage) { super(descriptor); + this.sdkMeter = sdkMeter; this.storage = storage; } @@ -46,62 +43,53 @@ public void add(long increment) { add(increment, Attributes.empty()); } - static final class SdkLongUpDownCounterBuilder - extends AbstractInstrumentBuilder - implements ExtendedLongUpDownCounterBuilder, LongUpDownCounterAdviceConfigurer { - - SdkLongUpDownCounterBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState 
meterSharedState, - String name) { - super( - meterProviderSharedState, - meterSharedState, - InstrumentType.UP_DOWN_COUNTER, - InstrumentValueType.LONG, - name, - "", - DEFAULT_UNIT); + static class SdkLongUpDownCounterBuilder implements LongUpDownCounterBuilder { + + final InstrumentBuilder builder; + + SdkLongUpDownCounterBuilder(SdkMeter sdkMeter, String name) { + this.builder = + new InstrumentBuilder( + name, InstrumentType.UP_DOWN_COUNTER, InstrumentValueType.LONG, sdkMeter); } @Override - protected SdkLongUpDownCounterBuilder getThis() { + public LongUpDownCounterBuilder setDescription(String description) { + builder.setDescription(description); return this; } @Override - public LongUpDownCounterBuilder setAdvice( - Consumer adviceConsumer) { - adviceConsumer.accept(this); + public LongUpDownCounterBuilder setUnit(String unit) { + builder.setUnit(unit); return this; } @Override public LongUpDownCounter build() { - return buildSynchronousInstrument(SdkLongUpDownCounter::new); + return builder.buildSynchronousInstrument(SdkLongUpDownCounter::new); } @Override public DoubleUpDownCounterBuilder ofDoubles() { - return swapBuilder(SdkDoubleUpDownCounter.SdkDoubleUpDownCounterBuilder::new); + return builder.swapBuilder(SdkDoubleUpDownCounter.SdkDoubleUpDownCounterBuilder::new); } @Override public ObservableLongUpDownCounter buildWithCallback( Consumer callback) { - return registerLongAsynchronousInstrument( + return builder.buildLongAsynchronousInstrument( InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, callback); } @Override public ObservableLongMeasurement buildObserver() { - return buildObservableMeasurement(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER); + return builder.buildObservableMeasurement(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER); } @Override - public LongUpDownCounterAdviceConfigurer setAttributes(List> attributes) { - adviceBuilder.setAttributes(attributes); - return this; + public String toString() { + return builder.toStringHelper(getClass().getSimpleName()); } } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeter.java index 5b36cef4e87..c0f1476077a 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeter.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeter.java @@ -5,6 +5,10 @@ package io.opentelemetry.sdk.metrics; +import static java.util.stream.Collectors.toMap; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.internal.GuardedBy; import io.opentelemetry.api.metrics.BatchCallback; import io.opentelemetry.api.metrics.DoubleGaugeBuilder; import io.opentelemetry.api.metrics.DoubleHistogramBuilder; @@ -13,19 +17,30 @@ import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.api.metrics.ObservableMeasurement; +import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.internal.MeterConfig; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.export.RegisteredReader; +import io.opentelemetry.sdk.metrics.internal.state.AsynchronousMetricStorage; import io.opentelemetry.sdk.metrics.internal.state.CallbackRegistration; import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import 
io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; +import io.opentelemetry.sdk.metrics.internal.state.MetricStorage; +import io.opentelemetry.sdk.metrics.internal.state.MetricStorageRegistry; import io.opentelemetry.sdk.metrics.internal.state.SdkObservableMeasurement; +import io.opentelemetry.sdk.metrics.internal.state.SynchronousMetricStorage; +import io.opentelemetry.sdk.metrics.internal.state.WriteableMetricStorage; +import io.opentelemetry.sdk.metrics.internal.view.RegisteredView; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.function.Function; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; @@ -34,6 +49,18 @@ final class SdkMeter implements Meter { private static final Logger logger = Logger.getLogger(SdkMeter.class.getName()); + private static final boolean INCUBATOR_AVAILABLE; + + static { + boolean incubatorAvailable = false; + try { + Class.forName("io.opentelemetry.api.incubator.metrics.ExtendedDefaultMeterProvider"); + incubatorAvailable = true; + } catch (ClassNotFoundException e) { + // Not available + } + INCUBATOR_AVAILABLE = incubatorAvailable; + } /** * Instrument names MUST conform to the following syntax. @@ -42,27 +69,38 @@ final class SdkMeter implements Meter { *
  <li>They are not null or empty strings. *
  <li>They are case-insensitive, ASCII strings. *
  <li>The first character must be an alphabetic character. - *
  <li>Subsequent characters must belong to the alphanumeric characters, '_', '.', and '-'. + *
  <li>Subsequent characters must belong to the alphanumeric characters, '_', '.', '/', and '-'. *
  • They can have a maximum length of 255 characters. * */ private static final Pattern VALID_INSTRUMENT_NAME_PATTERN = - Pattern.compile("([A-Za-z]){1}([A-Za-z0-9\\_\\-\\.]){0,254}"); + Pattern.compile("([A-Za-z]){1}([A-Za-z0-9\\_\\-\\./]){0,254}"); private static final Meter NOOP_METER = MeterProvider.noop().get("noop"); private static final String NOOP_INSTRUMENT_NAME = "noop"; - private final InstrumentationScopeInfo instrumentationScopeInfo; + private final Object collectLock = new Object(); + private final Object callbackLock = new Object(); + + @GuardedBy("callbackLock") + private final List callbackRegistrations = new ArrayList<>(); + private final MeterProviderSharedState meterProviderSharedState; - private final MeterSharedState meterSharedState; + private final InstrumentationScopeInfo instrumentationScopeInfo; + private final Map readerStorageRegistries; + private final boolean meterEnabled; SdkMeter( MeterProviderSharedState meterProviderSharedState, InstrumentationScopeInfo instrumentationScopeInfo, - List registeredReaders) { + List registeredReaders, + MeterConfig meterConfig) { this.instrumentationScopeInfo = instrumentationScopeInfo; this.meterProviderSharedState = meterProviderSharedState; - this.meterSharedState = MeterSharedState.create(instrumentationScopeInfo, registeredReaders); + this.readerStorageRegistries = + registeredReaders.stream() + .collect(toMap(Function.identity(), unused -> new MetricStorageRegistry())); + this.meterEnabled = meterConfig.isEnabled(); } // Visible for testing @@ -72,44 +110,89 @@ InstrumentationScopeInfo getInstrumentationScopeInfo() { /** Collect all metrics for the meter. */ Collection collectAll(RegisteredReader registeredReader, long epochNanos) { - return meterSharedState.collectAll(registeredReader, meterProviderSharedState, epochNanos); + // Short circuit collection process if meter is disabled + if (!meterEnabled) { + return Collections.emptyList(); + } + List currentRegisteredCallbacks; + synchronized (callbackLock) { + currentRegisteredCallbacks = new ArrayList<>(callbackRegistrations); + } + // Collections across all readers are sequential + synchronized (collectLock) { + for (CallbackRegistration callbackRegistration : currentRegisteredCallbacks) { + callbackRegistration.invokeCallback( + registeredReader, meterProviderSharedState.getStartEpochNanos(), epochNanos); + } + + Collection storages = + Objects.requireNonNull(readerStorageRegistries.get(registeredReader)).getStorages(); + List result = new ArrayList<>(storages.size()); + for (MetricStorage storage : storages) { + MetricData current = + storage.collect( + meterProviderSharedState.getResource(), + getInstrumentationScopeInfo(), + meterProviderSharedState.getStartEpochNanos(), + epochNanos); + // Ignore if the metric data doesn't have any data points, for example when aggregation is + // Aggregation#drop() + if (!current.isEmpty()) { + result.add(current); + } + } + return Collections.unmodifiableList(result); + } } - /** Reset the meter, clearing all registered instruments. */ + /** Reset the meter, clearing all registered callbacks and storages. */ void resetForTest() { - this.meterSharedState.resetForTest(); + synchronized (collectLock) { + synchronized (callbackLock) { + callbackRegistrations.clear(); + } + this.readerStorageRegistries.values().forEach(MetricStorageRegistry::resetForTest); + } } @Override public LongCounterBuilder counterBuilder(String name) { - return !checkValidInstrumentName(name) - ? 
NOOP_METER.counterBuilder(NOOP_INSTRUMENT_NAME) - : new SdkLongCounter.SdkLongCounterBuilder( - meterProviderSharedState, meterSharedState, name); + if (!checkValidInstrumentName(name)) { + return NOOP_METER.counterBuilder(NOOP_INSTRUMENT_NAME); + } + return INCUBATOR_AVAILABLE + ? IncubatingUtil.createExtendedLongCounterBuilder(this, name) + : new SdkLongCounter.SdkLongCounterBuilder(this, name); } @Override public LongUpDownCounterBuilder upDownCounterBuilder(String name) { - return !checkValidInstrumentName(name) - ? NOOP_METER.upDownCounterBuilder(NOOP_INSTRUMENT_NAME) - : new SdkLongUpDownCounter.SdkLongUpDownCounterBuilder( - meterProviderSharedState, meterSharedState, name); + if (!checkValidInstrumentName(name)) { + return NOOP_METER.upDownCounterBuilder(NOOP_INSTRUMENT_NAME); + } + return INCUBATOR_AVAILABLE + ? IncubatingUtil.createExtendedLongUpDownCounterBuilder(this, name) + : new SdkLongUpDownCounter.SdkLongUpDownCounterBuilder(this, name); } @Override public DoubleHistogramBuilder histogramBuilder(String name) { - return !checkValidInstrumentName(name) - ? NOOP_METER.histogramBuilder(NOOP_INSTRUMENT_NAME) - : new SdkDoubleHistogram.SdkDoubleHistogramBuilder( - meterProviderSharedState, meterSharedState, name); + if (!checkValidInstrumentName(name)) { + return NOOP_METER.histogramBuilder(NOOP_INSTRUMENT_NAME); + } + return INCUBATOR_AVAILABLE + ? IncubatingUtil.createExtendedDoubleHistogramBuilder(this, name) + : new SdkDoubleHistogram.SdkDoubleHistogramBuilder(this, name); } @Override public DoubleGaugeBuilder gaugeBuilder(String name) { - return !checkValidInstrumentName(name) - ? NOOP_METER.gaugeBuilder(NOOP_INSTRUMENT_NAME) - : new SdkDoubleGauge.SdkDoubleGaugeBuilder( - meterProviderSharedState, meterSharedState, name); + if (!checkValidInstrumentName(name)) { + return NOOP_METER.gaugeBuilder(NOOP_INSTRUMENT_NAME); + } + return INCUBATOR_AVAILABLE + ? IncubatingUtil.createExtendedDoubleGaugeBuilder(this, name) + : new SdkDoubleGauge.SdkDoubleGaugeBuilder(this, name); } @Override @@ -130,9 +213,7 @@ public BatchCallback batchCallback( continue; } SdkObservableMeasurement sdkMeasurement = (SdkObservableMeasurement) measurement; - if (!meterSharedState - .getInstrumentationScopeInfo() - .equals(sdkMeasurement.getInstrumentationScopeInfo())) { + if (!instrumentationScopeInfo.equals(sdkMeasurement.getInstrumentationScopeInfo())) { logger.log( Level.WARNING, "batchCallback called with instruments that belong to a different Meter."); @@ -143,8 +224,89 @@ public BatchCallback batchCallback( CallbackRegistration callbackRegistration = CallbackRegistration.create(sdkMeasurements, callback); - meterSharedState.registerCallback(callbackRegistration); - return new SdkObservableInstrument(meterSharedState, callbackRegistration); + registerCallback(callbackRegistration); + return new SdkObservableInstrument(this, callbackRegistration); + } + + /** + * Unregister the callback. + * + *

<p>Callbacks are originally registered via {@link #registerCallback(CallbackRegistration)}. + */ + void removeCallback(CallbackRegistration callbackRegistration) { + synchronized (callbackLock) { + this.callbackRegistrations.remove(callbackRegistration); + } + } + + /** + * Register the callback. + * + *
<p>
    The callback will be invoked once per collection until unregistered via {@link + * #removeCallback(CallbackRegistration)}. + */ + void registerCallback(CallbackRegistration callbackRegistration) { + synchronized (callbackLock) { + callbackRegistrations.add(callbackRegistration); + } + } + + /** Returns {@code true} if the {@link MeterConfig#enabled()} of the meter is {@code true}. */ + boolean isMeterEnabled() { + return meterEnabled; + } + + /** Registers new synchronous storage associated with a given instrument. */ + WriteableMetricStorage registerSynchronousMetricStorage(InstrumentDescriptor instrument) { + + List registeredStorages = new ArrayList<>(); + for (Map.Entry entry : + readerStorageRegistries.entrySet()) { + RegisteredReader reader = entry.getKey(); + MetricStorageRegistry registry = entry.getValue(); + for (RegisteredView registeredView : + reader.getViewRegistry().findViews(instrument, getInstrumentationScopeInfo())) { + if (Aggregation.drop() == registeredView.getView().getAggregation()) { + continue; + } + registeredStorages.add( + registry.register( + SynchronousMetricStorage.create( + reader, + registeredView, + instrument, + meterProviderSharedState.getExemplarFilter()))); + } + } + + if (registeredStorages.size() == 1) { + return registeredStorages.get(0); + } + + return new MultiWritableMetricStorage(registeredStorages); + } + + /** Register new asynchronous storage associated with a given instrument. */ + SdkObservableMeasurement registerObservableMeasurement( + InstrumentDescriptor instrumentDescriptor) { + List> registeredStorages = new ArrayList<>(); + for (Map.Entry entry : + readerStorageRegistries.entrySet()) { + RegisteredReader reader = entry.getKey(); + MetricStorageRegistry registry = entry.getValue(); + for (RegisteredView registeredView : + reader.getViewRegistry().findViews(instrumentDescriptor, getInstrumentationScopeInfo())) { + if (Aggregation.drop() == registeredView.getView().getAggregation()) { + continue; + } + registeredStorages.add( + registry.register( + AsynchronousMetricStorage.create(reader, registeredView, instrumentDescriptor))); + } + } + + return SdkObservableMeasurement.create( + instrumentationScopeInfo, instrumentDescriptor, registeredStorages); } @Override @@ -163,10 +325,42 @@ static boolean checkValidInstrumentName(String name) { Level.WARNING, "Instrument name \"" + name - + "\" is invalid, returning noop instrument. Instrument names must consist of 255 or fewer characters including alphanumeric, _, ., -, and start with a letter.", + + "\" is invalid, returning noop instrument. 
Instrument names must consist of 255 or fewer characters including alphanumeric, _, ., -, /, and start with a letter.", new AssertionError()); } return false; } + + private static class MultiWritableMetricStorage implements WriteableMetricStorage { + private final List storages; + + private MultiWritableMetricStorage(List storages) { + this.storages = storages; + } + + @Override + public void recordLong(long value, Attributes attributes, Context context) { + for (WriteableMetricStorage storage : storages) { + storage.recordLong(value, attributes, context); + } + } + + @Override + public void recordDouble(double value, Attributes attributes, Context context) { + for (WriteableMetricStorage storage : storages) { + storage.recordDouble(value, attributes, context); + } + } + + @Override + public boolean isEnabled() { + for (WriteableMetricStorage storage : storages) { + if (storage.isEnabled()) { + return true; + } + } + return false; + } + } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeterProvider.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeterProvider.java index 431fc41ab30..bb140a0c59d 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeterProvider.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeterProvider.java @@ -11,13 +11,17 @@ import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.sdk.common.Clock; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.internal.ComponentRegistry; +import io.opentelemetry.sdk.internal.ScopeConfigurator; import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector; +import io.opentelemetry.sdk.metrics.export.CollectionRegistration; +import io.opentelemetry.sdk.metrics.export.MetricProducer; import io.opentelemetry.sdk.metrics.export.MetricReader; +import io.opentelemetry.sdk.metrics.internal.MeterConfig; import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; -import io.opentelemetry.sdk.metrics.internal.export.CardinalityLimitSelector; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; import io.opentelemetry.sdk.metrics.internal.export.RegisteredReader; import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; import io.opentelemetry.sdk.metrics.internal.view.RegisteredView; @@ -45,8 +49,10 @@ public final class SdkMeterProvider implements MeterProvider, Closeable { private final List registeredViews; private final List registeredReaders; + private final List metricProducers; private final MeterProviderSharedState sharedState; private final ComponentRegistry registry; + private final ScopeConfigurator meterConfigurator; private final AtomicBoolean isClosed = new AtomicBoolean(false); /** Returns a new {@link SdkMeterProviderBuilder} for {@link SdkMeterProvider}. 
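A minimal standalone sketch, not part of the diff, of how the widened name pattern behaves; the pattern string is copied from the change above, and the class and sample names are illustrative only:

import java.util.regex.Pattern;

class InstrumentNameCheck {
  // Same expression as VALID_INSTRUMENT_NAME_PATTERN: a leading letter followed by up to
  // 254 letters, digits, '_', '-', '.', and (new in this change) '/'.
  private static final Pattern VALID =
      Pattern.compile("([A-Za-z]){1}([A-Za-z0-9\\_\\-\\./]){0,254}");

  public static void main(String[] args) {
    System.out.println(VALID.matcher("http.server/request.duration").matches()); // true
    System.out.println(VALID.matcher("1st.counter").matches()); // false: must start with a letter
  }
}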
*/ @@ -57,9 +63,11 @@ public static SdkMeterProviderBuilder builder() { SdkMeterProvider( List registeredViews, IdentityHashMap metricReaders, + List metricProducers, Clock clock, Resource resource, - ExemplarFilter exemplarFilter) { + ExemplarFilter exemplarFilter, + ScopeConfigurator meterConfigurator) { long startEpochNanos = clock.now(); this.registeredViews = registeredViews; this.registeredReaders = @@ -70,19 +78,33 @@ public static SdkMeterProviderBuilder builder() { entry.getKey(), ViewRegistry.create(entry.getKey(), entry.getValue(), registeredViews))) .collect(toList()); + this.metricProducers = metricProducers; this.sharedState = MeterProviderSharedState.create(clock, resource, exemplarFilter, startEpochNanos); this.registry = new ComponentRegistry<>( instrumentationLibraryInfo -> - new SdkMeter(sharedState, instrumentationLibraryInfo, registeredReaders)); + new SdkMeter( + sharedState, + instrumentationLibraryInfo, + registeredReaders, + getMeterConfig(instrumentationLibraryInfo))); + this.meterConfigurator = meterConfigurator; for (RegisteredReader registeredReader : registeredReaders) { - MetricProducer producer = new LeasedMetricProducer(registry, sharedState, registeredReader); - registeredReader.getReader().register(producer); + List readerMetricProducers = new ArrayList<>(metricProducers); + readerMetricProducers.add(new LeasedMetricProducer(registry, sharedState, registeredReader)); + registeredReader + .getReader() + .register(new SdkCollectionRegistration(readerMetricProducers, sharedState)); registeredReader.setLastCollectEpochNanos(startEpochNanos); } } + private MeterConfig getMeterConfig(InstrumentationScopeInfo instrumentationScopeInfo) { + MeterConfig meterConfig = meterConfigurator.apply(instrumentationScopeInfo); + return meterConfig == null ? 
MeterConfig.defaultConfig() : meterConfig; + } + @Override public MeterBuilder meterBuilder(String instrumentationScopeName) { if (registeredReaders.isEmpty()) { @@ -154,6 +176,8 @@ public String toString() { + sharedState.getResource() + ", metricReaders=" + registeredReaders.stream().map(RegisteredReader::getReader).collect(toList()) + + ", metricProducers=" + + metricProducers + ", views=" + registeredViews + "}"; @@ -176,7 +200,7 @@ private static class LeasedMetricProducer implements MetricProducer { } @Override - public Collection collectAllMetrics() { + public Collection produce(Resource unused) { Collection meters = registry.getComponents(); List result = new ArrayList<>(); long collectTime = sharedState.getClock().now(); @@ -187,4 +211,31 @@ public Collection collectAllMetrics() { return Collections.unmodifiableCollection(result); } } + + private static class SdkCollectionRegistration implements CollectionRegistration { + private final List metricProducers; + private final MeterProviderSharedState sharedState; + + private SdkCollectionRegistration( + List metricProducers, MeterProviderSharedState sharedState) { + this.metricProducers = metricProducers; + this.sharedState = sharedState; + } + + @Override + public Collection collectAllMetrics() { + if (metricProducers.isEmpty()) { + return Collections.emptyList(); + } + Resource resource = sharedState.getResource(); + if (metricProducers.size() == 1) { + return metricProducers.get(0).produce(resource); + } + List metricData = new ArrayList<>(); + for (MetricProducer metricProducer : metricProducers) { + metricData.addAll(metricProducer.produce(resource)); + } + return Collections.unmodifiableList(metricData); + } + } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeterProviderBuilder.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeterProviderBuilder.java index 1ad5efc0c22..90d0c7a8fef 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeterProviderBuilder.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkMeterProviderBuilder.java @@ -6,17 +6,23 @@ package io.opentelemetry.sdk.metrics; import io.opentelemetry.sdk.common.Clock; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder; +import io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector; +import io.opentelemetry.sdk.metrics.export.MetricProducer; import io.opentelemetry.sdk.metrics.export.MetricReader; +import io.opentelemetry.sdk.metrics.internal.MeterConfig; import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; import io.opentelemetry.sdk.metrics.internal.debug.SourceInfo; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; -import io.opentelemetry.sdk.metrics.internal.export.CardinalityLimitSelector; import io.opentelemetry.sdk.metrics.internal.view.RegisteredView; import io.opentelemetry.sdk.resources.Resource; import java.util.ArrayList; import java.util.IdentityHashMap; import java.util.List; import java.util.Objects; +import java.util.function.Predicate; /** * Builder class for the {@link SdkMeterProvider}. 
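For context, a hedged sketch of the kind of bridge the new produce(Resource) hook is aimed at; the class name is hypothetical and it returns an empty collection to stay self-contained:

import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.export.MetricProducer;
import io.opentelemetry.sdk.resources.Resource;
import java.util.Collection;
import java.util.Collections;

// Hypothetical bridge that exposes already-aggregated metrics from another framework.
final class OtherFrameworkMetricProducer implements MetricProducer {
  @Override
  public Collection<MetricData> produce(Resource resource) {
    // Translate the other framework's aggregated state into MetricData here; an empty
    // collection keeps the sketch self-contained.
    return Collections.emptyList();
  }
}

// Registered through the builder method added later in this diff, for example:
// SdkMeterProvider.builder().registerMetricProducer(new OtherFrameworkMetricProducer()).build();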
@@ -36,8 +42,11 @@ public final class SdkMeterProviderBuilder { private Resource resource = Resource.getDefault(); private final IdentityHashMap metricReaders = new IdentityHashMap<>(); + private final List metricProducers = new ArrayList<>(); private final List registeredViews = new ArrayList<>(); private ExemplarFilter exemplarFilter = DEFAULT_EXEMPLAR_FILTER; + private ScopeConfiguratorBuilder meterConfiguratorBuilder = + MeterConfig.configuratorBuilder(); SdkMeterProviderBuilder() {} @@ -74,7 +83,7 @@ public SdkMeterProviderBuilder addResource(Resource resource) { /** * Assign an {@link ExemplarFilter} for all metrics created by Meters. * - *
<p>
    Note: not currently stable but available for experimental use via {@link + *
<p>
    This method is experimental so not public. You may reflectively call it using {@link * SdkMeterProviderUtil#setExemplarFilter(SdkMeterProviderBuilder, ExemplarFilter)}. */ SdkMeterProviderBuilder setExemplarFilter(ExemplarFilter filter) { @@ -119,11 +128,7 @@ public SdkMeterProviderBuilder registerView(InstrumentSelector selector, View vi return this; } - /** - * Registers a {@link MetricReader}. - * - *
<p>
    Note: custom implementations of {@link MetricReader} are not currently supported. - */ + /** Registers a {@link MetricReader}. */ public SdkMeterProviderBuilder registerMetricReader(MetricReader reader) { metricReaders.put(reader, CardinalityLimitSelector.defaultCardinalityLimitSelector()); return this; @@ -132,18 +137,76 @@ public SdkMeterProviderBuilder registerMetricReader(MetricReader reader) { /** * Registers a {@link MetricReader} with a {@link CardinalityLimitSelector}. * - *
<p>
    Note: not currently stable but available for experimental use via {@link - * SdkMeterProviderUtil#registerMetricReaderWithCardinalitySelector(SdkMeterProviderBuilder, - * MetricReader, CardinalityLimitSelector)}. + *
<p>
    If {@link #registerMetricReader(MetricReader)} is used, the {@link + * CardinalityLimitSelector#defaultCardinalityLimitSelector()} is used. + * + * @since 1.44.0 */ - SdkMeterProviderBuilder registerMetricReader( + public SdkMeterProviderBuilder registerMetricReader( MetricReader reader, CardinalityLimitSelector cardinalityLimitSelector) { metricReaders.put(reader, cardinalityLimitSelector); return this; } + /** + * Registers a {@link MetricProducer}. + * + * @since 1.31.0 + */ + public SdkMeterProviderBuilder registerMetricProducer(MetricProducer metricProducer) { + metricProducers.add(metricProducer); + return this; + } + + /** + * Set the meter configurator, which computes {@link MeterConfig} for each {@link + * InstrumentationScopeInfo}. + * + *
<p>
    This method is experimental so not public. You may reflectively call it using {@link + * SdkMeterProviderUtil#setMeterConfigurator(SdkMeterProviderBuilder, ScopeConfigurator)}. + * + *
<p>
    Overrides any matchers added via {@link #addMeterConfiguratorCondition(Predicate, + * MeterConfig)}. + * + * @see MeterConfig#configuratorBuilder() + */ + SdkMeterProviderBuilder setMeterConfigurator(ScopeConfigurator meterConfigurator) { + this.meterConfiguratorBuilder = meterConfigurator.toBuilder(); + return this; + } + + /** + * Adds a condition to the meter configurator, which computes {@link MeterConfig} for each {@link + * InstrumentationScopeInfo}. + * + *
<p>
    This method is experimental so not public. You may reflectively call it using {@link + * SdkMeterProviderUtil#addMeterConfiguratorCondition(SdkMeterProviderBuilder, Predicate, + * MeterConfig)}. + * + *
<p>
    Applies after any previously added conditions. + * + *
<p>
    If {@link #setMeterConfigurator(ScopeConfigurator)} was previously called, this condition + * will only be applied if the {@link ScopeConfigurator#apply(Object)} returns null for the + * matched {@link InstrumentationScopeInfo}(s). + * + * @see ScopeConfiguratorBuilder#nameEquals(String) + * @see ScopeConfiguratorBuilder#nameMatchesGlob(String) + */ + SdkMeterProviderBuilder addMeterConfiguratorCondition( + Predicate scopeMatcher, MeterConfig meterConfig) { + this.meterConfiguratorBuilder.addCondition(scopeMatcher, meterConfig); + return this; + } + /** Returns an {@link SdkMeterProvider} built with the configuration of this builder. */ public SdkMeterProvider build() { - return new SdkMeterProvider(registeredViews, metricReaders, clock, resource, exemplarFilter); + return new SdkMeterProvider( + registeredViews, + metricReaders, + metricProducers, + clock, + resource, + exemplarFilter, + meterConfiguratorBuilder.build()); } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkObservableInstrument.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkObservableInstrument.java index 33c1adbc1e7..5b7731e9d53 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkObservableInstrument.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/SdkObservableInstrument.java @@ -14,7 +14,6 @@ import io.opentelemetry.api.metrics.ObservableLongUpDownCounter; import io.opentelemetry.sdk.internal.ThrottlingLogger; import io.opentelemetry.sdk.metrics.internal.state.CallbackRegistration; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import java.util.concurrent.atomic.AtomicBoolean; import java.util.logging.Level; import java.util.logging.Logger; @@ -31,13 +30,12 @@ class SdkObservableInstrument private static final Logger logger = Logger.getLogger(SdkObservableInstrument.class.getName()); private final ThrottlingLogger throttlingLogger = new ThrottlingLogger(logger); - private final MeterSharedState meterSharedState; + private final SdkMeter sdkMeter; private final CallbackRegistration callbackRegistration; private final AtomicBoolean removed = new AtomicBoolean(false); - SdkObservableInstrument( - MeterSharedState meterSharedState, CallbackRegistration callbackRegistration) { - this.meterSharedState = meterSharedState; + SdkObservableInstrument(SdkMeter sdkMeter, CallbackRegistration callbackRegistration) { + this.sdkMeter = sdkMeter; this.callbackRegistration = callbackRegistration; } @@ -48,7 +46,7 @@ public void close() { Level.WARNING, callbackRegistration + " has called close() multiple times."); return; } - meterSharedState.removeCallback(callbackRegistration); + sdkMeter.removeCallback(callbackRegistration); } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/View.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/View.java index e545b716459..32f4309b600 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/View.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/View.java @@ -60,8 +60,12 @@ static View create( /** Returns the attribute processor used for this view. */ abstract AttributesProcessor getAttributesProcessor(); - /** Returns the cardinality limit for this view. */ - abstract int getCardinalityLimit(); + /** + * Returns the cardinality limit for this view. 
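Taken together, a hedged sketch of how the newly public builder surface might be used; the exporter and bridge are assumed to be supplied by the caller, and the limit of 2000 is an arbitrary example:

import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.export.MetricExporter;
import io.opentelemetry.sdk.metrics.export.MetricProducer;
import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader;

class MeterProviderSetup {
  static SdkMeterProvider create(MetricExporter exporter, MetricProducer bridge) {
    return SdkMeterProvider.builder()
        // Per-reader cardinality limit, now public: a CardinalityLimitSelector written as a
        // lambda over InstrumentType.
        .registerMetricReader(PeriodicMetricReader.create(exporter), instrumentType -> 2000)
        // Bridge in metrics that were aggregated outside the SDK.
        .registerMetricProducer(bridge)
        .build();
  }
}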
+ * + * @since 1.44.0 + */ + public abstract int getCardinalityLimit(); @Override public final String toString() { diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ViewBuilder.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ViewBuilder.java index b1f60e35b94..87ce139f81b 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ViewBuilder.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/ViewBuilder.java @@ -7,6 +7,7 @@ import static io.opentelemetry.sdk.metrics.internal.view.AttributesProcessor.setIncludes; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.state.MetricStorage; @@ -93,9 +94,14 @@ public ViewBuilder setAttributeFilter(Predicate keyFilter) { /** * Add an attribute processor. * + *
<p>
    This method is experimental so not public. You may reflectively call it using {@link + * SdkMeterProviderUtil#appendFilteredBaggageAttributes(ViewBuilder, Predicate)}, {@link + * SdkMeterProviderUtil#appendAllBaggageAttributes(ViewBuilder)}. + * *
<p>
    Note: not currently stable but additional attribute processors can be configured via {@link * SdkMeterProviderUtil#appendAllBaggageAttributes(ViewBuilder)}. */ + @SuppressWarnings("unused") ViewBuilder addAttributesProcessor(AttributesProcessor attributesProcessor) { this.processor = this.processor.then(attributesProcessor); return this; @@ -104,12 +110,13 @@ ViewBuilder addAttributesProcessor(AttributesProcessor attributesProcessor) { /** * Set the cardinality limit. * - *
<p>
    Note: not currently stable but cardinality limit can be configured via - * SdkMeterProviderUtil#setCardinalityLimit(ViewBuilder, int)}. + *
<p>
    Read {@link MemoryMode} to understand the memory usage behavior of reaching cardinality + * limit. * * @param cardinalityLimit the maximum number of series for a metric + * @since 1.44.0 */ - ViewBuilder setCardinalityLimit(int cardinalityLimit) { + public ViewBuilder setCardinalityLimit(int cardinalityLimit) { if (cardinalityLimit <= 0) { throw new IllegalArgumentException("cardinalityLimit must be > 0"); } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/DoublePointData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/DoublePointData.java index bcb90a3d96a..fefbd27f547 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/DoublePointData.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/DoublePointData.java @@ -6,14 +6,12 @@ package io.opentelemetry.sdk.metrics.data; import java.util.List; -import javax.annotation.concurrent.Immutable; /** * Point data with a {@code double} aggregation value. * * @since 1.14.0 */ -@Immutable public interface DoublePointData extends PointData { /** Returns the value of the data point. */ double getValue(); diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/LongPointData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/LongPointData.java index 8cf3129119b..18b42cb9d06 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/LongPointData.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/LongPointData.java @@ -6,14 +6,12 @@ package io.opentelemetry.sdk.metrics.data; import java.util.List; -import javax.annotation.concurrent.Immutable; /** * A point data with a {@code double} aggregation value. * * @since 1.14.0 */ -@Immutable public interface LongPointData extends PointData { /** Returns the value of the data point. */ long getValue(); diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/PointData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/PointData.java index bb4684cfa6e..4e19c603e99 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/PointData.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/PointData.java @@ -7,7 +7,6 @@ import io.opentelemetry.api.common.Attributes; import java.util.List; -import javax.annotation.concurrent.Immutable; /** * A point in the metric data model. @@ -17,7 +16,6 @@ * * @since 1.14.0 */ -@Immutable public interface PointData { /** Returns the start time of the aggregation in epoch nanos. */ long getStartEpochNanos(); diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/AggregationTemporalitySelector.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/AggregationTemporalitySelector.java index ac13acd964e..f50e2bc5676 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/AggregationTemporalitySelector.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/AggregationTemporalitySelector.java @@ -7,6 +7,7 @@ import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import java.util.StringJoiner; /** * A functional interface that selects {@link AggregationTemporality} based on {@link @@ -76,4 +77,18 @@ static AggregationTemporalitySelector lowMemory() { /** Return the aggregation temporality for the {@link InstrumentType}. 
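A short sketch of the now-public per-view cardinality limit; the instrument name and the limit of 500 are illustrative:

import io.opentelemetry.sdk.metrics.InstrumentSelector;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.View;

class ViewCardinalityLimit {
  static SdkMeterProvider create() {
    return SdkMeterProvider.builder()
        .registerView(
            InstrumentSelector.builder().setName("http.server.request.duration").build(),
            // Caps the number of distinct attribute sets kept for matching instruments;
            // see MemoryMode for the behavior once the limit is reached.
            View.builder().setCardinalityLimit(500).build())
        .build();
  }
}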
*/ AggregationTemporality getAggregationTemporality(InstrumentType instrumentType); + + /** + * Returns a string representation of this selector, for using in {@link Object#toString()} + * implementations. + * + * @since 1.38.0 + */ + static String asString(AggregationTemporalitySelector selector) { + StringJoiner joiner = new StringJoiner(", ", "AggregationTemporalitySelector{", "}"); + for (InstrumentType type : InstrumentType.values()) { + joiner.add(type.name() + "=" + selector.getAggregationTemporality(type).name()); + } + return joiner.toString(); + } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/CardinalityLimitSelector.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/CardinalityLimitSelector.java similarity index 70% rename from sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/CardinalityLimitSelector.java rename to sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/CardinalityLimitSelector.java index 44f155162c9..43318b2ce0b 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/CardinalityLimitSelector.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/CardinalityLimitSelector.java @@ -3,22 +3,18 @@ * SPDX-License-Identifier: Apache-2.0 */ -package io.opentelemetry.sdk.metrics.internal.export; +package io.opentelemetry.sdk.metrics.export; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; -import io.opentelemetry.sdk.metrics.export.MetricReader; -import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; import io.opentelemetry.sdk.metrics.internal.state.MetricStorage; /** * Customize the {@link io.opentelemetry.sdk.metrics.export.MetricReader} cardinality limit as a * function of {@link InstrumentType}. Register via {@link - * SdkMeterProviderUtil#registerMetricReaderWithCardinalitySelector(SdkMeterProviderBuilder, - * MetricReader, CardinalityLimitSelector)}. + * SdkMeterProviderBuilder#registerMetricReader(MetricReader, CardinalityLimitSelector)}. * - *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. + * @since 1.44.0 */ @FunctionalInterface public interface CardinalityLimitSelector { diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/CollectionRegistration.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/CollectionRegistration.java index 707da70ea95..fbe117aecba 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/CollectionRegistration.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/CollectionRegistration.java @@ -5,7 +5,11 @@ package io.opentelemetry.sdk.metrics.export; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.data.MetricData; +import java.util.Collection; +import java.util.Collections; /** * A {@link CollectionRegistration} is passed to each {@link MetricReader} registered with {@link @@ -13,5 +17,28 @@ * * @since 1.14.0 */ -// TODO(jack-berg): Have methods when custom MetricReaders are supported -public interface CollectionRegistration {} +public interface CollectionRegistration { + + /** + * Returns a noop {@link CollectionRegistration}, useful for {@link MetricReader}s to hold before + * {@link MetricReader#register(CollectionRegistration)} is called. + * + * @since 1.31.0 + */ + static CollectionRegistration noop() { + return new CollectionRegistration() {}; + } + + /** + * Collect all metrics, including metrics from the SDK and any registered {@link MetricProducer}s. + * + *
<p>
    If {@link MetricReader#getMemoryMode()} is configured to {@link MemoryMode#REUSABLE_DATA} do + * not keep the result or any of its contained objects as they are to be reused to return the + * result for the next call to this method. + * + * @since 1.31.0 + */ + default Collection collectAllMetrics() { + return Collections.emptyList(); + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/DefaultAggregationSelector.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/DefaultAggregationSelector.java index f3cae755ad4..d6d3058abd9 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/DefaultAggregationSelector.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/DefaultAggregationSelector.java @@ -9,6 +9,8 @@ import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.internal.aggregator.AggregationUtil; +import java.util.StringJoiner; /** * A functional interface that selects default {@link Aggregation} based on {@link InstrumentType}. @@ -58,4 +60,21 @@ default DefaultAggregationSelector with(InstrumentType instrumentType, Aggregati *
<p>
    The default aggregation is used when an instrument does not match any views. */ Aggregation getDefaultAggregation(InstrumentType instrumentType); + + /** + * Returns a string representation of this selector, for using in {@link Object#toString()} + * implementations. + * + * @since 1.38.0 + */ + static String asString(DefaultAggregationSelector selector) { + StringJoiner joiner = new StringJoiner(", ", "DefaultAggregationSelector{", "}"); + for (InstrumentType type : InstrumentType.values()) { + joiner.add( + type.name() + + "=" + + AggregationUtil.aggregationName(selector.getDefaultAggregation(type))); + } + return joiner.toString(); + } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricExporter.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricExporter.java index af9f5f4747c..685c5f09bf1 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricExporter.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricExporter.java @@ -5,7 +5,10 @@ package io.opentelemetry.sdk.metrics.export; +import static io.opentelemetry.sdk.common.export.MemoryMode.IMMUTABLE_DATA; + import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.SdkMeterProvider; @@ -38,6 +41,16 @@ default Aggregation getDefaultAggregation(InstrumentType instrumentType) { return Aggregation.defaultAggregation(); } + /** + * Returns the memory mode used by this exporter's associated reader. + * + * @return The {@link MemoryMode} used by this exporter's associated reader + * @since 1.31.0 + */ + default MemoryMode getMemoryMode() { + return IMMUTABLE_DATA; + } + /** * Exports the {@code metrics}. The caller (i.e. {@link PeriodicMetricReader} will not call export * until the previous call completes. diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricProducer.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricProducer.java new file mode 100644 index 00000000000..723eb7e1e92 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricProducer.java @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.export; + +import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.resources.Resource; +import java.util.Collection; +import javax.annotation.concurrent.ThreadSafe; + +/** + * {@link MetricProducer} is the interface that is used to make metric data available to the {@link + * MetricReader}s. The primary implementation is provided by {@link + * io.opentelemetry.sdk.metrics.SdkMeterProvider}. + * + *
<p>
    Alternative {@link MetricProducer} implementations can be used to bridge aggregated metrics + * from other frameworks, and are registered with {@link + * SdkMeterProviderBuilder#registerMetricProducer(MetricProducer)}. NOTE: When possible, metrics + * from other frameworks SHOULD be bridged using the metric API, normally with asynchronous + * instruments which observe the aggregated state of the other framework. However, {@link + * MetricProducer} exists to accommodate scenarios where the metric API is insufficient. It should + * be used with caution as it requires the bridge to take a dependency on {@code + * opentelemetry-sdk-metrics}, which is generally not advised. + * + *
<p>
    Implementations must be thread-safe. + * + * @since 1.31.0 + */ +@ThreadSafe +public interface MetricProducer { + + /** + * Returns a collection of produced {@link MetricData}s to be exported. This will only be those + * metrics that have been produced since the last time this method was called. + * + * @return a collection of produced {@link MetricData}s to be exported. + */ + Collection produce(Resource resource); +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricReader.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricReader.java index 3752ba2640c..7ddab97dfc1 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricReader.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/MetricReader.java @@ -5,7 +5,10 @@ package io.opentelemetry.sdk.metrics.export; +import static io.opentelemetry.sdk.common.export.MemoryMode.IMMUTABLE_DATA; + import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.SdkMeterProvider; @@ -16,20 +19,15 @@ /** * A metric reader reads metrics from an {@link SdkMeterProvider}. * - *
<p>
    Custom implementations of {@link MetricReader} are not currently supported. Please use one of - * the built-in readers such as {@link PeriodicMetricReader}. - * * @since 1.14.0 */ public interface MetricReader extends AggregationTemporalitySelector, DefaultAggregationSelector, Closeable { /** - * Called by {@link SdkMeterProvider} and supplies the {@link MetricReader} with a handle to - * collect metrics. - * - *
<p>
    {@link CollectionRegistration} is currently an empty interface because custom - * implementations of {@link MetricReader} are not currently supported. + * Called by {@link SdkMeterProvider} on initialization to supply the {@link MetricReader} with + * {@link MetricProducer}s used to collect metrics. {@link MetricReader} implementations call + * {@link CollectionRegistration#collectAllMetrics()} to read metrics. */ void register(CollectionRegistration registration); @@ -44,6 +42,16 @@ default Aggregation getDefaultAggregation(InstrumentType instrumentType) { return Aggregation.defaultAggregation(); } + /** + * Returns the memory mode used by this reader. + * + * @return The {@link MemoryMode} used by this instance + * @since 1.31.0 + */ + default MemoryMode getMemoryMode() { + return IMMUTABLE_DATA; + } + /** * Read and export the metrics. * diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/PeriodicMetricReader.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/PeriodicMetricReader.java index a03e10e35d2..a19dbae5781 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/PeriodicMetricReader.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/export/PeriodicMetricReader.java @@ -6,13 +6,13 @@ package io.opentelemetry.sdk.metrics.export; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; import java.util.Collection; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; @@ -39,8 +39,8 @@ public final class PeriodicMetricReader implements MetricReader { private final ScheduledExecutorService scheduler; private final Scheduled scheduled; private final Object lock = new Object(); + private volatile CollectionRegistration collectionRegistration = CollectionRegistration.noop(); - private volatile MetricProducer metricProducer = MetricProducer.noop(); @Nullable private volatile ScheduledFuture scheduledFuture; /** @@ -74,6 +74,11 @@ public Aggregation getDefaultAggregation(InstrumentType instrumentType) { return exporter.getDefaultAggregation(instrumentType); } + @Override + public MemoryMode getMemoryMode() { + return exporter.getMemoryMode(); + } + @Override public CompletableResultCode forceFlush() { return scheduled.doRun(); @@ -111,8 +116,8 @@ public CompletableResultCode shutdown() { } @Override - public void register(CollectionRegistration registration) { - this.metricProducer = MetricProducer.asMetricProducer(registration); + public void register(CollectionRegistration collectionRegistration) { + this.collectionRegistration = collectionRegistration; start(); } @@ -153,7 +158,7 @@ CompletableResultCode doRun() { CompletableResultCode flushResult = new CompletableResultCode(); if (exportAvailable.compareAndSet(true, false)) { try { - Collection metricData = metricProducer.collectAllMetrics(); + Collection metricData = collectionRegistration.collectAllMetrics(); if (metricData.isEmpty()) { logger.log(Level.FINE, "No metric data to export - skipping export."); flushResult.succeed(); diff --git 
a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/MeterConfig.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/MeterConfig.java new file mode 100644 index 00000000000..35981025071 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/MeterConfig.java @@ -0,0 +1,66 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder; +import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; +import java.util.function.Predicate; +import javax.annotation.concurrent.Immutable; + +/** + * A collection of configuration options which define the behavior of a {@link Meter}. + * + *
<p>
    This class is internal and experimental. Its APIs are unstable and can change at any time. Its + * APIs (or a version of them) may be promoted to the public stable API in the future, but no + * guarantees are made. + * + * @see SdkMeterProviderUtil#setMeterConfigurator(SdkMeterProviderBuilder, ScopeConfigurator) + * @see SdkMeterProviderUtil#addMeterConfiguratorCondition(SdkMeterProviderBuilder, Predicate, + * MeterConfig) + */ +@AutoValue +@Immutable +public abstract class MeterConfig { + + private static final MeterConfig DEFAULT_CONFIG = new AutoValue_MeterConfig(/* enabled= */ true); + private static final MeterConfig DISABLED_CONFIG = + new AutoValue_MeterConfig(/* enabled= */ false); + + /** Returns a disabled {@link MeterConfig}. */ + public static MeterConfig disabled() { + return DISABLED_CONFIG; + } + + /** Returns an enabled {@link MeterConfig}. */ + public static MeterConfig enabled() { + return DEFAULT_CONFIG; + } + + /** + * Returns the default {@link MeterConfig}, which is used when no configurator is set or when the + * meter configurator returns {@code null} for a {@link InstrumentationScopeInfo}. + */ + public static MeterConfig defaultConfig() { + return DEFAULT_CONFIG; + } + + /** + * Create a {@link ScopeConfiguratorBuilder} for configuring {@link + * SdkMeterProviderUtil#setMeterConfigurator(SdkMeterProviderBuilder, ScopeConfigurator)}. + */ + public static ScopeConfiguratorBuilder configuratorBuilder() { + return ScopeConfigurator.builder(); + } + + MeterConfig() {} + + /** Returns {@code true} if this meter is enabled. Defaults to {@code true}. */ + public abstract boolean isEnabled(); +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/SdkMeterProviderUtil.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/SdkMeterProviderUtil.java index ecaf5e388ca..0f6d3035b25 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/SdkMeterProviderUtil.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/SdkMeterProviderUtil.java @@ -5,12 +5,12 @@ package io.opentelemetry.sdk.metrics.internal; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; import io.opentelemetry.sdk.metrics.ViewBuilder; -import io.opentelemetry.sdk.metrics.export.MetricReader; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; -import io.opentelemetry.sdk.metrics.internal.export.CardinalityLimitSelector; import io.opentelemetry.sdk.metrics.internal.view.AttributesProcessor; import io.opentelemetry.sdk.metrics.internal.view.StringPredicates; import java.lang.reflect.InvocationTargetException; @@ -18,8 +18,12 @@ import java.util.function.Predicate; /** - * This class is internal and is hence not for public use. Its APIs are unstable and can change at - * any time. + * A collection of methods that allow use of experimental features prior to availability in public + * APIs. + * + *
<p>
    This class is internal and experimental. Its APIs are unstable and can change at any time. Its + * APIs (or a version of them) may be promoted to the public stable API in the future, but no + * guarantees are made. */ public final class SdkMeterProviderUtil { @@ -44,25 +48,36 @@ public static void setExemplarFilter( } } - /** - * Reflectively add a {@link MetricReader} with the {@link CardinalityLimitSelector} to the {@link - * SdkMeterProviderBuilder}. - * - * @param sdkMeterProviderBuilder the builder - */ - public static void registerMetricReaderWithCardinalitySelector( + /** Reflectively set the {@link ScopeConfigurator} to the {@link SdkMeterProviderBuilder}. */ + public static void setMeterConfigurator( + SdkMeterProviderBuilder sdkMeterProviderBuilder, + ScopeConfigurator meterConfigurator) { + try { + Method method = + SdkMeterProviderBuilder.class.getDeclaredMethod( + "setMeterConfigurator", ScopeConfigurator.class); + method.setAccessible(true); + method.invoke(sdkMeterProviderBuilder, meterConfigurator); + } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + throw new IllegalStateException( + "Error calling setMeterConfigurator on SdkMeterProviderBuilder", e); + } + } + + /** Reflectively add a tracer configurator condition to the {@link SdkMeterProviderBuilder}. */ + public static void addMeterConfiguratorCondition( SdkMeterProviderBuilder sdkMeterProviderBuilder, - MetricReader metricReader, - CardinalityLimitSelector cardinalityLimitSelector) { + Predicate scopeMatcher, + MeterConfig meterConfig) { try { Method method = SdkMeterProviderBuilder.class.getDeclaredMethod( - "registerMetricReader", MetricReader.class, CardinalityLimitSelector.class); + "addMeterConfiguratorCondition", Predicate.class, MeterConfig.class); method.setAccessible(true); - method.invoke(sdkMeterProviderBuilder, metricReader, cardinalityLimitSelector); + method.invoke(sdkMeterProviderBuilder, scopeMatcher, meterConfig); } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { throw new IllegalStateException( - "Error calling addMetricReader on SdkMeterProviderBuilder", e); + "Error calling addMeterConfiguratorCondition on SdkMeterProviderBuilder", e); } } @@ -105,21 +120,6 @@ private static void addAttributesProcessor( } } - /** - * Reflectively set the {@code cardinalityLimit} on the {@link ViewBuilder}. - * - * @param viewBuilder the builder - */ - public static void setCardinalityLimit(ViewBuilder viewBuilder, int cardinalityLimit) { - try { - Method method = ViewBuilder.class.getDeclaredMethod("setCardinalityLimit", int.class); - method.setAccessible(true); - method.invoke(viewBuilder, cardinalityLimit); - } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { - throw new IllegalStateException("Error setting cardinalityLimit on ViewBuilder", e); - } - } - /** Reflectively reset the {@link SdkMeterProvider}, clearing all registered instruments. 
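To illustrate the experimental scope configuration, a hedged sketch that disables every instrument from one scope via the reflective accessors above; the scope name is hypothetical:

import io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder;
import io.opentelemetry.sdk.metrics.internal.MeterConfig;
import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil;

class DisableNoisyMeter {
  static SdkMeterProvider create() {
    SdkMeterProviderBuilder builder = SdkMeterProvider.builder();
    // Experimental: instruments created by this scope record nothing, and
    // SdkMeter.collectAll short-circuits to an empty list.
    SdkMeterProviderUtil.addMeterConfiguratorCondition(
        builder,
        ScopeConfiguratorBuilder.nameEquals("com.example.noisy-library"),
        MeterConfig.disabled());
    return builder.build();
  }
}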
*/ public static void resetForTest(SdkMeterProvider sdkMeterProvider) { try { diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/Aggregator.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/Aggregator.java index 863aff87300..e44eb2de33e 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/Aggregator.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/Aggregator.java @@ -53,6 +53,21 @@ default T diff(T previousCumulative, T currentCumulative) { throw new UnsupportedOperationException("This aggregator does not support diff."); } + /** + * Resets one reusable point to be a DELTA point by computing the difference between two + * cumulative points. + * + *
<p>
    The delta between the two points is set on {@code previousCumulativeReusable}. + * + *
<p>
    Aggregators MUST implement diff if it can be used with asynchronous instruments. + * + * @param previousCumulativeReusable the previously captured point. + * @param currentCumulative the newly captured (cumulative) point. + */ + default void diffInPlace(T previousCumulativeReusable, T currentCumulative) { + throw new UnsupportedOperationException("This aggregator does not support diffInPlace."); + } + /** * Return a new point representing the measurement. * @@ -62,6 +77,26 @@ default T toPoint(Measurement measurement) { throw new UnsupportedOperationException("This aggregator does not support toPoint."); } + /** + * Resets {@code reusablePoint} to represent the {@code measurement}. + * + *
<p>
    Aggregators MUST implement diff if it can be used with asynchronous instruments. + */ + default void toPoint(Measurement measurement, T reusablePoint) { + throw new UnsupportedOperationException("This aggregator does not support toPoint."); + } + + /** Creates a new reusable point. */ + default T createReusablePoint() { + throw new UnsupportedOperationException( + "This aggregator does not support createReusablePoint."); + } + + /** Copies {@code point} into {@code toReusablePoint}. */ + default void copyPoint(T point, T toReusablePoint) { + throw new UnsupportedOperationException("This aggregator does not support toPoint."); + } + /** * Returns the {@link MetricData} that this {@code Aggregation} will produce. * diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AggregatorFactory.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AggregatorFactory.java index 9802256b3b1..071df0fca81 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AggregatorFactory.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AggregatorFactory.java @@ -5,6 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; @@ -26,15 +27,18 @@ public interface AggregatorFactory { * @param instrumentDescriptor the descriptor of the {@code Instrument} that will record * measurements. * @param exemplarFilter the filter on which measurements should turn into exemplars + * @param memoryMode The {@link MemoryMode} the aggregator will use * @return a new {@link Aggregator}. {@link Aggregator#drop()} indicates no measurements should be * recorded. */ Aggregator createAggregator( - InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter); + InstrumentDescriptor instrumentDescriptor, + ExemplarFilter exemplarFilter, + MemoryMode memoryMode); /** * Determine if the {@link Aggregator} produced by {@link #createAggregator(InstrumentDescriptor, - * ExemplarFilter)} is compatible with the {@code instrumentDescriptor}. + * ExemplarFilter, MemoryMode)} is compatible with the {@code instrumentDescriptor}. 
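The in-place variants exist so that no new point has to be allocated per collection under REUSABLE_DATA. A deliberately toy illustration of the contract; these are not SDK classes, and the real point types carry much more state:

// Toy types used only to show what diffInPlace is expected to do.
final class ToyCumulativePoint {
  long value;
}

final class ToySumLogic {
  // Write (current - previous) into the reusable point, mirroring Aggregator#diffInPlace.
  static void diffInPlace(
      ToyCumulativePoint previousCumulativeReusable, ToyCumulativePoint currentCumulative) {
    previousCumulativeReusable.value = currentCumulative.value - previousCumulativeReusable.value;
  }
}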
*/ boolean isCompatibleWithInstrument(InstrumentDescriptor instrumentDescriptor); } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AggregatorHandle.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AggregatorHandle.java index 6e3b3f06f53..2bf7c8c4db6 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AggregatorHandle.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/AggregatorHandle.java @@ -28,6 +28,7 @@ public abstract class AggregatorHandle exemplarReservoir; + private volatile boolean valuesRecorded = false; protected AggregatorHandle(ExemplarReservoir exemplarReservoir) { this.exemplarReservoir = exemplarReservoir; @@ -39,6 +40,10 @@ protected AggregatorHandle(ExemplarReservoir exemplarReservoir) { */ public final T aggregateThenMaybeReset( long startEpochNanos, long epochNanos, Attributes attributes, boolean reset) { + if (reset) { + valuesRecorded = false; + } + return doAggregateThenMaybeReset( startEpochNanos, epochNanos, @@ -69,6 +74,7 @@ public final void recordLong(long value, Attributes attributes, Context context) */ public final void recordLong(long value) { doRecordLong(value); + valuesRecorded = true; } /** @@ -94,6 +100,7 @@ public final void recordDouble(double value, Attributes attributes, Context cont */ public final void recordDouble(double value) { doRecordDouble(value); + valuesRecorded = true; } /** @@ -104,4 +111,13 @@ protected void doRecordDouble(double value) { throw new UnsupportedOperationException( "This aggregator does not support recording double values."); } + + /** + * Checks whether this handle has values recorded. + * + * @return True if values has been recorded to it + */ + public boolean hasRecordedValues() { + return valuesRecorded; + } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/Base2ExponentialHistogramIndexer.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/Base2ExponentialHistogramIndexer.java index 749cba883ae..a0b1acb52a4 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/Base2ExponentialHistogramIndexer.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/Base2ExponentialHistogramIndexer.java @@ -43,7 +43,7 @@ private Base2ExponentialHistogramIndexer(int scale) { /** Get an indexer for the given scale. Indexers are cached and reused for performance. 
*/ static Base2ExponentialHistogramIndexer get(int scale) { - return cache.computeIfAbsent(scale, unused -> new Base2ExponentialHistogramIndexer(scale)); + return cache.computeIfAbsent(scale, Base2ExponentialHistogramIndexer::new); } /** diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramAggregator.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramAggregator.java index 35f8289441e..18b5b60ee4b 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramAggregator.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramAggregator.java @@ -5,25 +5,26 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; -import com.google.auto.value.AutoValue; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData; import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.internal.data.EmptyExponentialHistogramBuckets; import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import io.opentelemetry.sdk.metrics.internal.data.MutableExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.internal.data.MutableExponentialHistogramPointData; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.resources.Resource; import java.util.Collection; -import java.util.Collections; import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; import java.util.function.Supplier; import javax.annotation.Nullable; @@ -39,6 +40,7 @@ public final class DoubleBase2ExponentialHistogramAggregator private final Supplier> reservoirSupplier; private final int maxBuckets; private final int maxScale; + private final MemoryMode memoryMode; /** * Constructs an exponential histogram aggregator. 
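For orientation, maxBuckets and maxScale here correspond to the arguments of the public exponential bucket histogram aggregation. A hedged sketch of selecting it through a view; the values 160 and 20 are illustrative:

import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.InstrumentSelector;
import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.View;

class ExponentialHistogramView {
  static SdkMeterProvider create() {
    return SdkMeterProvider.builder()
        .registerView(
            InstrumentSelector.builder().setType(InstrumentType.HISTOGRAM).build(),
            // maxBuckets and maxScale flow into the aggregator constructor shown above.
            View.builder()
                .setAggregation(Aggregation.base2ExponentialBucketHistogram(160, 20))
                .build())
        .build();
  }
}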
@@ -48,15 +50,17 @@ public final class DoubleBase2ExponentialHistogramAggregator public DoubleBase2ExponentialHistogramAggregator( Supplier> reservoirSupplier, int maxBuckets, - int maxScale) { + int maxScale, + MemoryMode memoryMode) { this.reservoirSupplier = reservoirSupplier; this.maxBuckets = maxBuckets; this.maxScale = maxScale; + this.memoryMode = memoryMode; } @Override public AggregatorHandle createHandle() { - return new Handle(reservoirSupplier.get(), maxBuckets, maxScale); + return new Handle(reservoirSupplier.get(), maxBuckets, maxScale, memoryMode); } @Override @@ -87,8 +91,16 @@ static final class Handle private double max; private long count; private int currentScale; + private final MemoryMode memoryMode; - Handle(ExemplarReservoir reservoir, int maxBuckets, int maxScale) { + // Used only when MemoryMode = REUSABLE_DATA + @Nullable private final MutableExponentialHistogramPointData reusablePoint; + + Handle( + ExemplarReservoir reservoir, + int maxBuckets, + int maxScale, + MemoryMode memoryMode) { super(reservoir); this.maxBuckets = maxBuckets; this.maxScale = maxScale; @@ -98,6 +110,11 @@ static final class Handle this.max = -1; this.count = 0; this.currentScale = maxScale; + this.reusablePoint = + (memoryMode == MemoryMode.REUSABLE_DATA) + ? new MutableExponentialHistogramPointData() + : null; + this.memoryMode = memoryMode; } @Override @@ -107,21 +124,46 @@ protected synchronized ExponentialHistogramPointData doAggregateThenMaybeReset( Attributes attributes, List exemplars, boolean reset) { - ExponentialHistogramPointData point = - ImmutableExponentialHistogramPointData.create( - currentScale, - sum, - zeroCount, - this.count > 0, - this.min, - this.count > 0, - this.max, - resolveBuckets(this.positiveBuckets, currentScale, reset), - resolveBuckets(this.negativeBuckets, currentScale, reset), - startEpochNanos, - epochNanos, - attributes, - exemplars); + + ExponentialHistogramPointData point; + if (reusablePoint == null) { + point = + ImmutableExponentialHistogramPointData.create( + currentScale, + sum, + zeroCount, + this.count > 0, + this.min, + this.count > 0, + this.max, + resolveBuckets( + this.positiveBuckets, currentScale, reset, /* reusableBuckets= */ null), + resolveBuckets( + this.negativeBuckets, currentScale, reset, /* reusableBuckets= */ null), + startEpochNanos, + epochNanos, + attributes, + exemplars); + } else /* REUSABLE_DATA */ { + point = + reusablePoint.set( + currentScale, + sum, + zeroCount, + this.count > 0, + this.min, + this.count > 0, + this.max, + resolveBuckets( + this.positiveBuckets, currentScale, reset, reusablePoint.getPositiveBuckets()), + resolveBuckets( + this.negativeBuckets, currentScale, reset, reusablePoint.getNegativeBuckets()), + startEpochNanos, + epochNanos, + attributes, + exemplars); + } + if (reset) { this.sum = 0; this.zeroCount = 0; @@ -134,11 +176,38 @@ protected synchronized ExponentialHistogramPointData doAggregateThenMaybeReset( } private ExponentialHistogramBuckets resolveBuckets( - @Nullable DoubleBase2ExponentialHistogramBuckets buckets, int scale, boolean reset) { + @Nullable DoubleBase2ExponentialHistogramBuckets buckets, + int scale, + boolean reset, + @Nullable ExponentialHistogramBuckets reusableBuckets) { if (buckets == null) { return EmptyExponentialHistogramBuckets.get(scale); } - ExponentialHistogramBuckets copy = buckets.copy(); + + ExponentialHistogramBuckets copy; + if (reusableBuckets == null) { + copy = buckets.copy(); + } else { + MutableExponentialHistogramBuckets mutableExponentialHistogramBuckets; 
+ if (reusableBuckets instanceof MutableExponentialHistogramBuckets) { + mutableExponentialHistogramBuckets = (MutableExponentialHistogramBuckets) reusableBuckets; + } else /* EmptyExponentialHistogramBuckets */ { + mutableExponentialHistogramBuckets = new MutableExponentialHistogramBuckets(); + } + + DynamicPrimitiveLongList reusableBucketCountsList = + mutableExponentialHistogramBuckets.getReusableBucketCountsList(); + buckets.getBucketCountsIntoReusableList(reusableBucketCountsList); + + mutableExponentialHistogramBuckets.set( + buckets.getScale(), + buckets.getOffset(), + buckets.getTotalCount(), + reusableBucketCountsList); + + copy = mutableExponentialHistogramBuckets; + } + if (reset) { buckets.clear(maxScale); } @@ -166,13 +235,15 @@ protected synchronized void doRecordDouble(double value) { } else if (c > 0) { // Initialize positive buckets at current scale, if needed if (positiveBuckets == null) { - positiveBuckets = new DoubleBase2ExponentialHistogramBuckets(currentScale, maxBuckets); + positiveBuckets = + new DoubleBase2ExponentialHistogramBuckets(currentScale, maxBuckets, memoryMode); } buckets = positiveBuckets; } else { // Initialize negative buckets at current scale, if needed if (negativeBuckets == null) { - negativeBuckets = new DoubleBase2ExponentialHistogramBuckets(currentScale, maxBuckets); + negativeBuckets = + new DoubleBase2ExponentialHistogramBuckets(currentScale, maxBuckets, memoryMode); } buckets = negativeBuckets; } @@ -206,21 +277,4 @@ void downScale(int by) { } } } - - @AutoValue - abstract static class EmptyExponentialHistogramBuckets implements ExponentialHistogramBuckets { - - private static final Map ZERO_BUCKETS = - new ConcurrentHashMap<>(); - - EmptyExponentialHistogramBuckets() {} - - static ExponentialHistogramBuckets get(int scale) { - return ZERO_BUCKETS.computeIfAbsent( - scale, - scale1 -> - new AutoValue_DoubleBase2ExponentialHistogramAggregator_EmptyExponentialHistogramBuckets( - scale1, 0, Collections.emptyList(), 0)); - } - } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramBuckets.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramBuckets.java index 0b66fd74a85..94659d0ecc4 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramBuckets.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramBuckets.java @@ -5,6 +5,11 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; +import static io.opentelemetry.sdk.common.export.MemoryMode.IMMUTABLE_DATA; +import static io.opentelemetry.sdk.common.export.MemoryMode.REUSABLE_DATA; + +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; import io.opentelemetry.sdk.internal.PrimitiveLongList; import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; import java.util.Collections; @@ -20,12 +25,17 @@ */ final class DoubleBase2ExponentialHistogramBuckets implements ExponentialHistogramBuckets { + private final MemoryMode memoryMode; private AdaptingCircularBufferCounter counts; private int scale; private Base2ExponentialHistogramIndexer base2ExponentialHistogramIndexer; private long totalCount; - DoubleBase2ExponentialHistogramBuckets(int scale, int maxBuckets) { + // Only used when memory mode is REUSABLE_DATA + @Nullable private AdaptingCircularBufferCounter 
reusableCounts; + + DoubleBase2ExponentialHistogramBuckets(int scale, int maxBuckets, MemoryMode memoryMode) { + this.memoryMode = memoryMode; this.counts = new AdaptingCircularBufferCounter(maxBuckets); this.scale = scale; this.base2ExponentialHistogramIndexer = Base2ExponentialHistogramIndexer.get(this.scale); @@ -38,6 +48,8 @@ final class DoubleBase2ExponentialHistogramBuckets implements ExponentialHistogr this.scale = buckets.scale; this.base2ExponentialHistogramIndexer = buckets.base2ExponentialHistogramIndexer; this.totalCount = buckets.totalCount; + this.memoryMode = buckets.memoryMode; + this.reusableCounts = buckets.reusableCounts; } /** Returns a copy of this bucket. */ @@ -90,6 +102,31 @@ public List getBucketCounts() { return PrimitiveLongList.wrap(countsArr); } + /** + * Fills the given reusable list with the bucket counts. + * + *
<p>
    NOTE: This is the same as {@link #getBucketCounts()} but instead of returning a List with + * the values is fill the values into {@code reusableLongList} + * + * @param reusableLongList The list to fill with the bucket counts + */ + void getBucketCountsIntoReusableList(DynamicPrimitiveLongList reusableLongList) { + if (counts.isEmpty()) { + reusableLongList.resizeAndClear(0); + return; + } + + int length = counts.getIndexEnd() - counts.getIndexStart() + 1; + + if (reusableLongList.size() != length) { + reusableLongList.resizeAndClear(length); + } + + for (int i = 0; i < length; i++) { + reusableLongList.setLong(i, counts.get(i + counts.getIndexStart())); + } + } + @Override public long getTotalCount() { return totalCount; @@ -107,7 +144,16 @@ void downscale(int by) { // We want to preserve other optimisations here as well, e.g. integer size. // Instead of creating a new counter, we copy the existing one (for bucket size // optimisations), and clear the values before writing the new ones. - AdaptingCircularBufferCounter newCounts = new AdaptingCircularBufferCounter(counts); + AdaptingCircularBufferCounter newCounts; + if (memoryMode == IMMUTABLE_DATA) { + newCounts = new AdaptingCircularBufferCounter(counts); + } else { + if (reusableCounts == null) { + reusableCounts = new AdaptingCircularBufferCounter(counts); + } + newCounts = reusableCounts; + } + newCounts.clear(); for (int i = counts.getIndexStart(); i <= counts.getIndexEnd(); i++) { @@ -119,7 +165,14 @@ void downscale(int by) { } } } - this.counts = newCounts; + + if (memoryMode == REUSABLE_DATA) { + AdaptingCircularBufferCounter existingCounts = this.counts; + this.counts = newCounts; + reusableCounts = existingCounts; + } else { + this.counts = newCounts; + } } this.scale = this.scale - by; diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregator.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregator.java index e011351cd50..82d71f25313 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregator.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregator.java @@ -8,6 +8,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.internal.GuardedBy; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.internal.PrimitiveLongList; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; @@ -16,6 +17,7 @@ import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import io.opentelemetry.sdk.metrics.internal.data.MutableHistogramPointData; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.resources.Resource; @@ -24,8 +26,8 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.concurrent.locks.ReentrantLock; import java.util.function.Supplier; +import javax.annotation.Nullable; /** * Aggregator that generates explicit bucket histograms. 
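The hunks below thread a MemoryMode through this aggregator: with REUSABLE_DATA the handle keeps a single MutableHistogramPointData and rewrites it on every collection, while IMMUTABLE_DATA keeps the existing allocate-per-collection ImmutableHistogramPointData path. As a rough sketch of that reuse pattern, using only types added in this change (sizes and values below are made up for illustration):

    import io.opentelemetry.api.common.Attributes;
    import io.opentelemetry.sdk.metrics.internal.data.MutableHistogramPointData;
    import java.util.Arrays;
    import java.util.Collections;

    class HistogramPointReuseSketch {
      // Sized once: 2 boundaries means 3 bucket counts, mirroring boundaries.size() + 1 below.
      private final MutableHistogramPointData reusablePoint = new MutableHistogramPointData(3);

      MutableHistogramPointData collect(long startEpochNanos, long epochNanos) {
        // Every collection cycle overwrites the same instance instead of allocating a new point.
        return reusablePoint.set(
            startEpochNanos,
            epochNanos,
            Attributes.empty(),
            /* sum= */ 12.0,
            /* hasMin= */ true, /* min= */ 1.0,
            /* hasMax= */ true, /* max= */ 9.0,
            Arrays.asList(5.0, 10.0),
            new long[] {1, 1, 1},
            Collections.emptyList());
      }
    }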
@@ -36,6 +38,7 @@ public final class DoubleExplicitBucketHistogramAggregator implements Aggregator { private final double[] boundaries; + private final MemoryMode memoryMode; // a cache for converting to MetricData private final List boundaryList; @@ -47,10 +50,14 @@ public final class DoubleExplicitBucketHistogramAggregator * * @param boundaries Bucket boundaries, in-order. * @param reservoirSupplier Supplier of exemplar reservoirs per-stream. + * @param memoryMode The {@link MemoryMode} to use in this aggregator. */ public DoubleExplicitBucketHistogramAggregator( - double[] boundaries, Supplier> reservoirSupplier) { + double[] boundaries, + Supplier> reservoirSupplier, + MemoryMode memoryMode) { this.boundaries = boundaries; + this.memoryMode = memoryMode; List boundaryList = new ArrayList<>(this.boundaries.length); for (double v : this.boundaries) { @@ -62,7 +69,7 @@ public DoubleExplicitBucketHistogramAggregator( @Override public AggregatorHandle createHandle() { - return new Handle(this.boundaryList, this.boundaries, reservoirSupplier.get()); + return new Handle(this.boundaryList, this.boundaries, reservoirSupplier.get(), memoryMode); } @Override @@ -87,6 +94,8 @@ static final class Handle extends AggregatorHandle boundaryList, double[] boundaries, - ExemplarReservoir reservoir) { + ExemplarReservoir reservoir, + MemoryMode memoryMode) { super(reservoir); this.boundaryList = boundaryList; this.boundaries = boundaries; @@ -116,6 +127,9 @@ static final class Handle extends AggregatorHandle exemplars, boolean reset) { - lock.lock(); - try { - HistogramPointData pointData = - ImmutableHistogramPointData.create( - startEpochNanos, - epochNanos, - attributes, - sum, - this.count > 0, - this.min, - this.count > 0, - this.max, - boundaryList, - PrimitiveLongList.wrap(Arrays.copyOf(counts, counts.length)), - exemplars); + synchronized (lock) { + HistogramPointData pointData; + if (reusablePoint == null) { + pointData = + ImmutableHistogramPointData.create( + startEpochNanos, + epochNanos, + attributes, + sum, + this.count > 0, + this.min, + this.count > 0, + this.max, + boundaryList, + PrimitiveLongList.wrap(Arrays.copyOf(counts, counts.length)), + exemplars); + } else /* REUSABLE_DATA */ { + pointData = + reusablePoint.set( + startEpochNanos, + epochNanos, + attributes, + sum, + this.count > 0, + this.min, + this.count > 0, + this.max, + boundaryList, + counts, + exemplars); + } if (reset) { this.sum = 0; this.min = Double.MAX_VALUE; @@ -148,8 +178,6 @@ protected HistogramPointData doAggregateThenMaybeReset( Arrays.fill(this.counts, 0); } return pointData; - } finally { - lock.unlock(); } } @@ -157,15 +185,12 @@ protected HistogramPointData doAggregateThenMaybeReset( protected void doRecordDouble(double value) { int bucketIndex = ExplicitBucketHistogramUtils.findBucketIndex(this.boundaries, value); - lock.lock(); - try { + synchronized (lock) { this.sum += value; this.min = Math.min(this.min, value); this.max = Math.max(this.max, value); this.count++; this.counts[bucketIndex]++; - } finally { - lock.unlock(); } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregator.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregator.java index a56df5860ce..2063cb212d7 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregator.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregator.java 
@@ -7,6 +7,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoublePointData; @@ -14,6 +15,7 @@ import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import io.opentelemetry.sdk.metrics.internal.data.MutableDoublePointData; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.state.Measurement; @@ -41,15 +43,17 @@ public final class DoubleLastValueAggregator implements Aggregator { private final Supplier> reservoirSupplier; + private final MemoryMode memoryMode; public DoubleLastValueAggregator( - Supplier> reservoirSupplier) { + Supplier> reservoirSupplier, MemoryMode memoryMode) { this.reservoirSupplier = reservoirSupplier; + this.memoryMode = memoryMode; } @Override public AggregatorHandle createHandle() { - return new Handle(reservoirSupplier.get()); + return new Handle(reservoirSupplier.get(), memoryMode); } @Override @@ -57,6 +61,11 @@ public DoublePointData diff(DoublePointData previous, DoublePointData current) { return current; } + @Override + public void diffInPlace(DoublePointData previousReusable, DoublePointData current) { + ((MutableDoublePointData) previousReusable).set(current); + } + @Override public DoublePointData toPoint(Measurement measurement) { return ImmutableDoublePointData.create( @@ -66,6 +75,26 @@ public DoublePointData toPoint(Measurement measurement) { measurement.doubleValue()); } + @Override + public void toPoint(Measurement measurement, DoublePointData reusablePoint) { + ((MutableDoublePointData) reusablePoint) + .set( + measurement.startEpochNanos(), + measurement.epochNanos(), + measurement.attributes(), + measurement.doubleValue()); + } + + @Override + public DoublePointData createReusablePoint() { + return new MutableDoublePointData(); + } + + @Override + public void copyPoint(DoublePointData point, DoublePointData toReusablePoint) { + ((MutableDoublePointData) toReusablePoint).set(point); + } + @Override public MetricData toMetricData( Resource resource, @@ -88,8 +117,16 @@ static final class Handle extends AggregatorHandle current = new AtomicReference<>(DEFAULT_VALUE); - private Handle(ExemplarReservoir reservoir) { + // Only used when memoryMode is REUSABLE_DATA + @Nullable private final MutableDoublePointData reusablePoint; + + private Handle(ExemplarReservoir reservoir, MemoryMode memoryMode) { super(reservoir); + if (memoryMode == MemoryMode.REUSABLE_DATA) { + reusablePoint = new MutableDoublePointData(); + } else { + reusablePoint = null; + } } @Override @@ -100,8 +137,14 @@ protected DoublePointData doAggregateThenMaybeReset( List exemplars, boolean reset) { Double value = reset ? 
this.current.getAndSet(DEFAULT_VALUE) : this.current.get(); - return ImmutableDoublePointData.create( - startEpochNanos, epochNanos, attributes, Objects.requireNonNull(value), exemplars); + if (reusablePoint != null) { + reusablePoint.set( + startEpochNanos, epochNanos, attributes, Objects.requireNonNull(value), exemplars); + return reusablePoint; + } else { + return ImmutableDoublePointData.create( + startEpochNanos, epochNanos, attributes, Objects.requireNonNull(value), exemplars); + } } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregator.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregator.java index 42189ac3d58..17f02f85208 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregator.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregator.java @@ -7,6 +7,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoublePointData; @@ -16,6 +17,7 @@ import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; +import io.opentelemetry.sdk.metrics.internal.data.MutableDoublePointData; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; @@ -24,6 +26,7 @@ import java.util.Collection; import java.util.List; import java.util.function.Supplier; +import javax.annotation.Nullable; /** * Sum aggregator that keeps values as {@code double}s. @@ -34,24 +37,28 @@ public final class DoubleSumAggregator extends AbstractSumAggregator { private final Supplier> reservoirSupplier; + private final MemoryMode memoryMode; /** * Constructs a sum aggregator. * * @param instrumentDescriptor The instrument being recorded, used to compute monotonicity. * @param reservoirSupplier Supplier of exemplar reservoirs per-stream. + * @param memoryMode The memory mode to use. 
*/ public DoubleSumAggregator( InstrumentDescriptor instrumentDescriptor, - Supplier> reservoirSupplier) { + Supplier> reservoirSupplier, + MemoryMode memoryMode) { super(instrumentDescriptor); this.reservoirSupplier = reservoirSupplier; + this.memoryMode = memoryMode; } @Override public AggregatorHandle createHandle() { - return new Handle(reservoirSupplier.get()); + return new Handle(reservoirSupplier.get(), memoryMode); } @Override @@ -64,6 +71,17 @@ public DoublePointData diff(DoublePointData previousPoint, DoublePointData curre currentPoint.getExemplars()); } + @Override + public void diffInPlace(DoublePointData previousReusablePoint, DoublePointData currentPoint) { + ((MutableDoublePointData) previousReusablePoint) + .set( + currentPoint.getStartEpochNanos(), + currentPoint.getEpochNanos(), + currentPoint.getAttributes(), + currentPoint.getValue() - previousReusablePoint.getValue(), + currentPoint.getExemplars()); + } + @Override public DoublePointData toPoint(Measurement measurement) { return ImmutableDoublePointData.create( @@ -73,6 +91,26 @@ public DoublePointData toPoint(Measurement measurement) { measurement.doubleValue()); } + @Override + public void toPoint(Measurement measurement, DoublePointData reusablePoint) { + ((MutableDoublePointData) reusablePoint) + .set( + measurement.startEpochNanos(), + measurement.epochNanos(), + measurement.attributes(), + measurement.doubleValue()); + } + + @Override + public DoublePointData createReusablePoint() { + return new MutableDoublePointData(); + } + + @Override + public void copyPoint(DoublePointData point, DoublePointData toReusablePoint) { + ((MutableDoublePointData) toReusablePoint).set(point); + } + @Override public MetricData toMetricData( Resource resource, @@ -92,8 +130,12 @@ public MetricData toMetricData( static final class Handle extends AggregatorHandle { private final DoubleAdder current = AdderUtil.createDoubleAdder(); - Handle(ExemplarReservoir exemplarReservoir) { + // Only used if memoryMode == MemoryMode.REUSABLE_DATA + @Nullable private final MutableDoublePointData reusablePoint; + + Handle(ExemplarReservoir exemplarReservoir, MemoryMode memoryMode) { super(exemplarReservoir); + reusablePoint = memoryMode == MemoryMode.REUSABLE_DATA ? new MutableDoublePointData() : null; } @Override @@ -104,8 +146,13 @@ protected DoublePointData doAggregateThenMaybeReset( List exemplars, boolean reset) { double value = reset ? this.current.sumThenReset() : this.current.sum(); - return ImmutableDoublePointData.create( - startEpochNanos, epochNanos, attributes, value, exemplars); + if (reusablePoint != null) { + reusablePoint.set(startEpochNanos, epochNanos, attributes, value, exemplars); + return reusablePoint; + } else { + return ImmutableDoublePointData.create( + startEpochNanos, epochNanos, attributes, value, exemplars); + } } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExplicitBucketHistogramUtils.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExplicitBucketHistogramUtils.java index fd11247c57c..4d25b873f5a 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExplicitBucketHistogramUtils.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/ExplicitBucketHistogramUtils.java @@ -26,7 +26,8 @@ private ExplicitBucketHistogramUtils() {} /** Converts bucket boundary "convenient" configuration into the "more efficient" array. 
*/ public static double[] createBoundaryArray(List boundaries) { - return validateBucketBoundaries(boundaries.stream().mapToDouble(i -> i).toArray()); + validateBucketBoundaries(boundaries); + return boundaries.stream().mapToDouble(i -> i).toArray(); } /** @@ -51,32 +52,30 @@ public static int findBucketIndex(double[] boundaries, double value) { * Validates errors in boundary configuration. * * @param boundaries The array of bucket boundaries. - * @return The original boundaries. * @throws IllegalArgumentException if boundaries are not specified correctly. */ - public static double[] validateBucketBoundaries(double[] boundaries) { + public static void validateBucketBoundaries(List boundaries) { for (double v : boundaries) { if (Double.isNaN(v)) { throw new IllegalArgumentException("invalid bucket boundary: NaN"); } } - for (int i = 1; i < boundaries.length; ++i) { - if (boundaries[i - 1] >= boundaries[i]) { + for (int i = 1; i < boundaries.size(); ++i) { + if (boundaries.get(i - 1) >= boundaries.get(i)) { throw new IllegalArgumentException( "Bucket boundaries must be in increasing order: " - + boundaries[i - 1] + + boundaries.get(i - 1) + " >= " - + boundaries[i]); + + boundaries.get(i)); } } - if (boundaries.length > 0) { - if (boundaries[0] == Double.NEGATIVE_INFINITY) { + if (!boundaries.isEmpty()) { + if (boundaries.get(0) == Double.NEGATIVE_INFINITY) { throw new IllegalArgumentException("invalid bucket boundary: -Inf"); } - if (boundaries[boundaries.length - 1] == Double.POSITIVE_INFINITY) { + if (boundaries.get(boundaries.size() - 1) == Double.POSITIVE_INFINITY) { throw new IllegalArgumentException("invalid bucket boundary: +Inf"); } } - return boundaries; } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongLastValueAggregator.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongLastValueAggregator.java index e1c310207d6..27422733c43 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongLastValueAggregator.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongLastValueAggregator.java @@ -7,6 +7,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongPointData; @@ -14,6 +15,7 @@ import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import io.opentelemetry.sdk.metrics.internal.data.MutableLongPointData; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.state.Measurement; @@ -38,14 +40,17 @@ */ public final class LongLastValueAggregator implements Aggregator { private final Supplier> reservoirSupplier; + private final MemoryMode memoryMode; - public LongLastValueAggregator(Supplier> reservoirSupplier) { + public LongLastValueAggregator( + Supplier> reservoirSupplier, MemoryMode memoryMode) { this.reservoirSupplier = reservoirSupplier; + this.memoryMode = memoryMode; } @Override public AggregatorHandle createHandle() { - return new Handle(reservoirSupplier.get()); + 
return new Handle(reservoirSupplier.get(), memoryMode); } @Override @@ -53,6 +58,11 @@ public LongPointData diff(LongPointData previous, LongPointData current) { return current; } + @Override + public void diffInPlace(LongPointData previousReusablePoint, LongPointData currentPoint) { + ((MutableLongPointData) previousReusablePoint).set(currentPoint); + } + @Override public LongPointData toPoint(Measurement measurement) { return ImmutableLongPointData.create( @@ -62,6 +72,26 @@ public LongPointData toPoint(Measurement measurement) { measurement.longValue()); } + @Override + public void toPoint(Measurement measurement, LongPointData reusablePoint) { + ((MutableLongPointData) reusablePoint) + .set( + measurement.startEpochNanos(), + measurement.epochNanos(), + measurement.attributes(), + measurement.longValue()); + } + + @Override + public LongPointData createReusablePoint() { + return new MutableLongPointData(); + } + + @Override + public void copyPoint(LongPointData point, LongPointData toReusablePoint) { + ((MutableLongPointData) toReusablePoint).set(point); + } + @Override public MetricData toMetricData( Resource resource, @@ -83,8 +113,16 @@ static final class Handle extends AggregatorHandle current = new AtomicReference<>(DEFAULT_VALUE); - Handle(ExemplarReservoir exemplarReservoir) { + // Only used when memoryMode is REUSABLE_DATA + @Nullable private final MutableLongPointData reusablePoint; + + Handle(ExemplarReservoir exemplarReservoir, MemoryMode memoryMode) { super(exemplarReservoir); + if (memoryMode == MemoryMode.REUSABLE_DATA) { + reusablePoint = new MutableLongPointData(); + } else { + reusablePoint = null; + } } @Override @@ -95,8 +133,15 @@ protected LongPointData doAggregateThenMaybeReset( List exemplars, boolean reset) { Long value = reset ? 
this.current.getAndSet(DEFAULT_VALUE) : this.current.get(); - return ImmutableLongPointData.create( - startEpochNanos, epochNanos, attributes, Objects.requireNonNull(value), exemplars); + + if (reusablePoint != null) { + reusablePoint.set( + startEpochNanos, epochNanos, attributes, Objects.requireNonNull(value), exemplars); + return reusablePoint; + } else { + return ImmutableLongPointData.create( + startEpochNanos, epochNanos, attributes, Objects.requireNonNull(value), exemplars); + } } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregator.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregator.java index 3899476591a..cfd23c0cf97 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregator.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregator.java @@ -7,6 +7,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongPointData; @@ -16,6 +17,7 @@ import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; +import io.opentelemetry.sdk.metrics.internal.data.MutableLongPointData; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; @@ -24,6 +26,7 @@ import java.util.Collection; import java.util.List; import java.util.function.Supplier; +import javax.annotation.Nullable; /** * Sum aggregator that keeps values as {@code long}s. 
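The hunks below apply the same treatment to this sum aggregator: when REUSABLE_DATA is selected the handle holds one MutableLongPointData and refills it on each collection instead of allocating a new immutable point. A minimal sketch of that handle-side reuse, with placeholder values:

    import io.opentelemetry.api.common.Attributes;
    import io.opentelemetry.sdk.metrics.internal.data.MutableLongPointData;
    import java.util.Collections;

    class LongSumReuseSketch {
      // Allocated once per handle when MemoryMode.REUSABLE_DATA is in effect (see hunks below).
      private final MutableLongPointData reusablePointData = new MutableLongPointData();

      MutableLongPointData collect(long startEpochNanos, long epochNanos, long sum) {
        reusablePointData.set(
            startEpochNanos, epochNanos, Attributes.empty(), sum, Collections.emptyList());
        return reusablePointData;
      }
    }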
@@ -35,17 +38,20 @@ public final class LongSumAggregator extends AbstractSumAggregator { private final Supplier> reservoirSupplier; + private final MemoryMode memoryMode; public LongSumAggregator( InstrumentDescriptor instrumentDescriptor, - Supplier> reservoirSupplier) { + Supplier> reservoirSupplier, + MemoryMode memoryMode) { super(instrumentDescriptor); this.reservoirSupplier = reservoirSupplier; + this.memoryMode = memoryMode; } @Override public AggregatorHandle createHandle() { - return new Handle(reservoirSupplier.get()); + return new Handle(reservoirSupplier.get(), memoryMode); } @Override @@ -58,6 +64,17 @@ public LongPointData diff(LongPointData previousPoint, LongPointData currentPoin currentPoint.getExemplars()); } + @Override + public void diffInPlace(LongPointData previousReusablePoint, LongPointData currentPoint) { + ((MutableLongPointData) previousReusablePoint) + .set( + currentPoint.getStartEpochNanos(), + currentPoint.getEpochNanos(), + currentPoint.getAttributes(), + currentPoint.getValue() - previousReusablePoint.getValue(), + currentPoint.getExemplars()); + } + @Override public LongPointData toPoint(Measurement measurement) { return ImmutableLongPointData.create( @@ -67,6 +84,26 @@ public LongPointData toPoint(Measurement measurement) { measurement.longValue()); } + @Override + public void toPoint(Measurement measurement, LongPointData reusablePoint) { + ((MutableLongPointData) reusablePoint) + .set( + measurement.startEpochNanos(), + measurement.epochNanos(), + measurement.attributes(), + measurement.longValue()); + } + + @Override + public LongPointData createReusablePoint() { + return new MutableLongPointData(); + } + + @Override + public void copyPoint(LongPointData point, LongPointData toReusablePoint) { + ((MutableLongPointData) toReusablePoint).set(point); + } + @Override public MetricData toMetricData( Resource resource, @@ -86,8 +123,13 @@ public MetricData toMetricData( static final class Handle extends AggregatorHandle { private final LongAdder current = AdderUtil.createLongAdder(); - Handle(ExemplarReservoir exemplarReservoir) { + // Only used if memoryMode == MemoryMode.REUSABLE_DATA + @Nullable private final MutableLongPointData reusablePointData; + + Handle(ExemplarReservoir exemplarReservoir, MemoryMode memoryMode) { super(exemplarReservoir); + reusablePointData = + memoryMode == MemoryMode.REUSABLE_DATA ? new MutableLongPointData() : null; } @Override @@ -98,8 +140,13 @@ protected LongPointData doAggregateThenMaybeReset( List exemplars, boolean reset) { long value = reset ? 
this.current.sumThenReset() : this.current.sum(); - return ImmutableLongPointData.create( - startEpochNanos, epochNanos, attributes, value, exemplars); + if (reusablePointData != null) { + reusablePointData.set(startEpochNanos, epochNanos, attributes, value, exemplars); + return reusablePointData; + } else { + return ImmutableLongPointData.create( + startEpochNanos, epochNanos, attributes, value, exemplars); + } } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/EmptyExponentialHistogramBuckets.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/EmptyExponentialHistogramBuckets.java new file mode 100644 index 00000000000..afcbddb52c5 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/EmptyExponentialHistogramBuckets.java @@ -0,0 +1,34 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * An empty {@link ExponentialHistogramBuckets} + * + *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@AutoValue +public abstract class EmptyExponentialHistogramBuckets implements ExponentialHistogramBuckets { + + private static final Map ZERO_BUCKETS = + new ConcurrentHashMap<>(); + + EmptyExponentialHistogramBuckets() {} + + public static ExponentialHistogramBuckets get(int scale) { + return ZERO_BUCKETS.computeIfAbsent( + scale, + scale1 -> + new AutoValue_EmptyExponentialHistogramBuckets(scale1, 0, Collections.emptyList(), 0)); + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/HistogramPointDataValidations.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/HistogramPointDataValidations.java new file mode 100644 index 00000000000..02534dd138f --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/HistogramPointDataValidations.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import io.opentelemetry.sdk.metrics.data.HistogramPointData; +import java.util.List; + +/** + * Validations for {@link HistogramPointData}. + * + *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +final class HistogramPointDataValidations { + + private HistogramPointDataValidations() {} + + static void validateIsStrictlyIncreasing(List xs) { + for (int i = 0; i < xs.size() - 1; i++) { + if (xs.get(i).compareTo(xs.get(i + 1)) >= 0) { + throw new IllegalArgumentException("invalid boundaries: " + xs); + } + } + } + + static void validateFiniteBoundaries(List boundaries) { + if (!boundaries.isEmpty() + && (boundaries.get(0).isInfinite() || boundaries.get(boundaries.size() - 1).isInfinite())) { + throw new IllegalArgumentException("invalid boundaries: contains explicit +/-Inf"); + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableHistogramPointData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableHistogramPointData.java index 1a07c708983..49b56b7d780 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableHistogramPointData.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableHistogramPointData.java @@ -5,6 +5,9 @@ package io.opentelemetry.sdk.metrics.internal.data; +import static io.opentelemetry.sdk.metrics.internal.data.HistogramPointDataValidations.validateFiniteBoundaries; +import static io.opentelemetry.sdk.metrics.internal.data.HistogramPointDataValidations.validateIsStrictlyIncreasing; + import com.google.auto.value.AutoValue; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.internal.PrimitiveLongList; @@ -85,13 +88,8 @@ public static ImmutableHistogramPointData create( + " instead of " + counts.size()); } - if (!isStrictlyIncreasing(boundaries)) { - throw new IllegalArgumentException("invalid boundaries: " + boundaries); - } - if (!boundaries.isEmpty() - && (boundaries.get(0).isInfinite() || boundaries.get(boundaries.size() - 1).isInfinite())) { - throw new IllegalArgumentException("invalid boundaries: contains explicit +/-Inf"); - } + validateIsStrictlyIncreasing(boundaries); + validateFiniteBoundaries(boundaries); long totalCount = 0; for (long c : PrimitiveLongList.toArray(counts)) { @@ -113,13 +111,4 @@ public static ImmutableHistogramPointData create( } ImmutableHistogramPointData() {} - - private static boolean isStrictlyIncreasing(List xs) { - for (int i = 0; i < xs.size() - 1; i++) { - if (xs.get(i).compareTo(xs.get(i + 1)) >= 0) { - return false; - } - } - return true; - } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableSummaryData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableSummaryData.java index 7f145269341..0c1f4db1bd6 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableSummaryData.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableSummaryData.java @@ -25,7 +25,7 @@ * instruments. * *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time + * at any time. */ @Immutable @AutoValue diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableSummaryPointData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableSummaryPointData.java index 447ad45963d..54597c1acb9 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableSummaryPointData.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableSummaryPointData.java @@ -17,7 +17,7 @@ * A single data point that summarizes the values in a time series of numeric values. * *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time + * at any time. */ @Immutable @AutoValue diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableValueAtQuantile.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableValueAtQuantile.java index 01995b28aa4..d8f31391cb8 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableValueAtQuantile.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/ImmutableValueAtQuantile.java @@ -13,7 +13,7 @@ * A summary metric value. * *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time + * at any time. */ @Immutable @AutoValue diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableDoublePointData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableDoublePointData.java new file mode 100644 index 00000000000..1f19c3911ae --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableDoublePointData.java @@ -0,0 +1,138 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.metrics.data.DoublePointData; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * A mutable {@link DoublePointData} + * + *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *
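One detail worth calling out: equals() below is written against the DoublePointData interface rather than this concrete class, so a mutable point should compare equal to an immutable point carrying the same values (in that direction only). A small illustrative check, with arbitrary values:

    import io.opentelemetry.api.common.Attributes;
    import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
    import io.opentelemetry.sdk.metrics.internal.data.MutableDoublePointData;

    class PointEqualitySketch {
      static boolean sameValuesCompareEqual() {
        MutableDoublePointData mutable = new MutableDoublePointData();
        mutable.set(1L, 2L, Attributes.empty(), 3.5); // exemplars default to an empty list
        // true in this direction; the AutoValue-generated immutable side compares by its own type
        return mutable.equals(ImmutableDoublePointData.create(1L, 2L, Attributes.empty(), 3.5));
      }
    }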
<p>
    This class is not thread-safe. + */ +public class MutableDoublePointData implements DoublePointData { + + private long startEpochNanos; + private long epochNanos; + + private Attributes attributes = Attributes.empty(); + + private double value; + private List exemplars = Collections.emptyList(); + + @Override + public double getValue() { + return value; + } + + @Override + public long getStartEpochNanos() { + return startEpochNanos; + } + + @Override + public long getEpochNanos() { + return epochNanos; + } + + @Override + public Attributes getAttributes() { + return attributes; + } + + @Override + public List getExemplars() { + return exemplars; + } + + /** + * Sets all {@link MutableDoublePointData} values based on {@code point}. + * + * @param point The point to take the values from + */ + public void set(DoublePointData point) { + set( + point.getStartEpochNanos(), + point.getEpochNanos(), + point.getAttributes(), + point.getValue(), + point.getExemplars()); + } + + /** Sets all {@link MutableDoublePointData} values , besides exemplars which are set to empty. */ + public void set(long startEpochNanos, long epochNanos, Attributes attributes, double value) { + set(startEpochNanos, epochNanos, attributes, value, Collections.emptyList()); + } + + /** Sets all {@link MutableDoublePointData} values. */ + public void set( + long startEpochNanos, + long epochNanos, + Attributes attributes, + double value, + List exemplars) { + this.startEpochNanos = startEpochNanos; + this.epochNanos = epochNanos; + this.attributes = attributes; + this.value = value; + this.exemplars = exemplars; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof DoublePointData)) { + return false; + } + DoublePointData pointData = (DoublePointData) o; + return startEpochNanos == pointData.getStartEpochNanos() + && epochNanos == pointData.getEpochNanos() + && Double.doubleToLongBits(value) == Double.doubleToLongBits(pointData.getValue()) + && Objects.equals(attributes, pointData.getAttributes()) + && Objects.equals(exemplars, pointData.getExemplars()); + } + + @Override + public int hashCode() { + int hashcode = 1; + hashcode *= 1000003; + hashcode ^= (int) ((startEpochNanos >>> 32) ^ startEpochNanos); + hashcode *= 1000003; + hashcode ^= (int) ((epochNanos >>> 32) ^ epochNanos); + hashcode *= 1000003; + hashcode ^= attributes.hashCode(); + hashcode *= 1000003; + hashcode ^= (int) ((Double.doubleToLongBits(value) >>> 32) ^ Double.doubleToLongBits(value)); + hashcode *= 1000003; + hashcode ^= exemplars.hashCode(); + return hashcode; + } + + @Override + public String toString() { + return "MutableDoublePointData{" + + "startEpochNanos=" + + startEpochNanos + + ", epochNanos=" + + epochNanos + + ", attributes=" + + attributes + + ", value=" + + value + + ", exemplars=" + + exemplars + + '}'; + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramBuckets.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramBuckets.java new file mode 100644 index 00000000000..9554d4722ea --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramBuckets.java @@ -0,0 +1,102 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; +import 
io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; +import java.util.List; +import java.util.Objects; + +/** + * A mutable {@link ExponentialHistogramBuckets} + * + *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *
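This is the bucket container that resolveBuckets() in the exponential histogram aggregator refills on every collection: it hands out its backing DynamicPrimitiveLongList, the caller rewrites that list in place, and set(...) then publishes the new scale, offset and total. A minimal sketch with made-up counts:

    import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList;
    import io.opentelemetry.sdk.metrics.internal.data.MutableExponentialHistogramBuckets;

    class BucketReuseSketch {
      private final MutableExponentialHistogramBuckets reusableBuckets =
          new MutableExponentialHistogramBuckets();

      MutableExponentialHistogramBuckets collect(int scale, int offset) {
        // Refill the backing primitive list in place, then publish it through set(...).
        DynamicPrimitiveLongList counts = reusableBuckets.getReusableBucketCountsList();
        counts.resizeAndClear(2);
        counts.setLong(0, 3);
        counts.setLong(1, 4);
        return reusableBuckets.set(scale, offset, /* totalCount= */ 7, counts);
      }
    }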
<p>
    This class is not thread-safe. + */ +public final class MutableExponentialHistogramBuckets implements ExponentialHistogramBuckets { + + private int scale; + private int offset; + private long totalCount; + private DynamicPrimitiveLongList bucketCounts = DynamicPrimitiveLongList.empty(); + + @Override + public int getScale() { + return scale; + } + + @Override + public int getOffset() { + return offset; + } + + @Override + public List getBucketCounts() { + return bucketCounts; + } + + public DynamicPrimitiveLongList getReusableBucketCountsList() { + return bucketCounts; + } + + @Override + public long getTotalCount() { + return totalCount; + } + + public MutableExponentialHistogramBuckets set( + int scale, int offset, long totalCount, DynamicPrimitiveLongList bucketCounts) { + this.scale = scale; + this.offset = offset; + this.totalCount = totalCount; + this.bucketCounts = bucketCounts; + + return this; + } + + @Override + public String toString() { + return "MutableExponentialHistogramBuckets{" + + "scale=" + + scale + + ", " + + "offset=" + + offset + + ", " + + "bucketCounts=" + + bucketCounts + + ", " + + "totalCount=" + + totalCount + + "}"; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ExponentialHistogramBuckets) { + ExponentialHistogramBuckets that = (ExponentialHistogramBuckets) o; + return this.scale == that.getScale() + && this.offset == that.getOffset() + && this.totalCount == that.getTotalCount() + && Objects.equals(this.bucketCounts, that.getBucketCounts()); + } + return false; + } + + @Override + public int hashCode() { + int result = scale; + result = 31 * result + offset; + result = 31 * result + (int) (totalCount ^ (totalCount >>> 32)); + result = 31 * result + (bucketCounts != null ? bucketCounts.hashCode() : 0); + return result; + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramPointData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramPointData.java new file mode 100644 index 00000000000..bccc7a9abe1 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramPointData.java @@ -0,0 +1,248 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData; +import java.util.Collections; +import java.util.List; + +/** + * A mutable {@link ExponentialHistogramPointData} + * + *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *
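One subtlety of set(...) below: the total count is not passed in; it is derived from zeroCount plus the total counts of the two bucket sets. A tiny sketch using the EmptyExponentialHistogramBuckets helper also added in this change:

    import io.opentelemetry.api.common.Attributes;
    import io.opentelemetry.sdk.metrics.internal.data.EmptyExponentialHistogramBuckets;
    import io.opentelemetry.sdk.metrics.internal.data.MutableExponentialHistogramPointData;
    import java.util.Collections;

    class ExponentialPointReuseSketch {
      static long derivedCount() {
        MutableExponentialHistogramPointData point = new MutableExponentialHistogramPointData();
        point.set(
            /* scale= */ 0, /* sum= */ 4.0, /* zeroCount= */ 1,
            /* hasMin= */ true, /* min= */ 1.0,
            /* hasMax= */ true, /* max= */ 3.0,
            EmptyExponentialHistogramBuckets.get(0),  // positive buckets, totalCount 0
            EmptyExponentialHistogramBuckets.get(0),  // negative buckets, totalCount 0
            /* startEpochNanos= */ 1L, /* epochNanos= */ 2L,
            Attributes.empty(),
            Collections.emptyList());
        return point.getCount(); // 1, i.e. zeroCount + 0 + 0
      }
    }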
<p>
    This class is not thread-safe. + */ +public final class MutableExponentialHistogramPointData implements ExponentialHistogramPointData { + + private long startEpochNanos; + private long epochNanos; + private Attributes attributes = Attributes.empty(); + private int scale; + private double sum; + private long count; + private long zeroCount; + private boolean hasMin; + private double min; + private boolean hasMax; + private double max; + private ExponentialHistogramBuckets positiveBuckets = EmptyExponentialHistogramBuckets.get(0); + private ExponentialHistogramBuckets negativeBuckets = EmptyExponentialHistogramBuckets.get(0); + private List exemplars = Collections.emptyList(); + + @Override + public int getScale() { + return scale; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public long getCount() { + return count; + } + + @Override + public long getZeroCount() { + return zeroCount; + } + + @Override + public boolean hasMin() { + return hasMin; + } + + @Override + public double getMin() { + return min; + } + + @Override + public boolean hasMax() { + return hasMax; + } + + @Override + public double getMax() { + return max; + } + + @Override + public ExponentialHistogramBuckets getPositiveBuckets() { + return positiveBuckets; + } + + @Override + public ExponentialHistogramBuckets getNegativeBuckets() { + return negativeBuckets; + } + + @Override + public long getStartEpochNanos() { + return startEpochNanos; + } + + @Override + public long getEpochNanos() { + return epochNanos; + } + + @Override + public Attributes getAttributes() { + return attributes; + } + + @Override + public List getExemplars() { + return exemplars; + } + + @SuppressWarnings("TooManyParameters") + public ExponentialHistogramPointData set( + int scale, + double sum, + long zeroCount, + boolean hasMin, + double min, + boolean hasMax, + double max, + ExponentialHistogramBuckets positiveBuckets, + ExponentialHistogramBuckets negativeBuckets, + long startEpochNanos, + long epochNanos, + Attributes attributes, + List exemplars) { + this.count = zeroCount + positiveBuckets.getTotalCount() + negativeBuckets.getTotalCount(); + this.scale = scale; + this.sum = sum; + this.zeroCount = zeroCount; + this.hasMin = hasMin; + this.min = min; + this.hasMax = hasMax; + this.max = max; + this.positiveBuckets = positiveBuckets; + this.negativeBuckets = negativeBuckets; + this.startEpochNanos = startEpochNanos; + this.epochNanos = epochNanos; + this.attributes = attributes; + this.exemplars = exemplars; + + return this; + } + + @Override + public String toString() { + return "MutableExponentialHistogramPointData{" + + "startEpochNanos=" + + startEpochNanos + + ", " + + "epochNanos=" + + epochNanos + + ", " + + "attributes=" + + attributes + + ", " + + "scale=" + + scale + + ", " + + "sum=" + + sum + + ", " + + "count=" + + count + + ", " + + "zeroCount=" + + zeroCount + + ", " + + "hasMin=" + + hasMin + + ", " + + "min=" + + min + + ", " + + "hasMax=" + + hasMax + + ", " + + "max=" + + max + + ", " + + "positiveBuckets=" + + positiveBuckets + + ", " + + "negativeBuckets=" + + negativeBuckets + + ", " + + "exemplars=" + + exemplars + + "}"; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ExponentialHistogramPointData) { + ExponentialHistogramPointData that = (ExponentialHistogramPointData) o; + return this.startEpochNanos == that.getStartEpochNanos() + && this.epochNanos == that.getEpochNanos() + && this.attributes.equals(that.getAttributes()) 
+ && this.scale == that.getScale() + && Double.doubleToLongBits(this.sum) == Double.doubleToLongBits(that.getSum()) + && this.count == that.getCount() + && this.zeroCount == that.getZeroCount() + && this.hasMin == that.hasMin() + && Double.doubleToLongBits(this.min) == Double.doubleToLongBits(that.getMin()) + && this.hasMax == that.hasMax() + && Double.doubleToLongBits(this.max) == Double.doubleToLongBits(that.getMax()) + && this.positiveBuckets.equals(that.getPositiveBuckets()) + && this.negativeBuckets.equals(that.getNegativeBuckets()) + && this.exemplars.equals(that.getExemplars()); + } + return false; + } + + @Override + public int hashCode() { + int hash = 1; + hash *= 1000003; + hash ^= (int) ((startEpochNanos >>> 32) ^ startEpochNanos); + hash *= 1000003; + hash ^= (int) ((epochNanos >>> 32) ^ epochNanos); + hash *= 1000003; + hash ^= attributes.hashCode(); + hash *= 1000003; + hash ^= scale; + hash *= 1000003; + hash ^= (int) ((Double.doubleToLongBits(sum) >>> 32) ^ Double.doubleToLongBits(sum)); + hash *= 1000003; + hash ^= (int) ((count >>> 32) ^ count); + hash *= 1000003; + hash ^= (int) ((zeroCount >>> 32) ^ zeroCount); + hash *= 1000003; + hash ^= hasMin ? 1231 : 1237; + hash *= 1000003; + hash ^= (int) ((Double.doubleToLongBits(min) >>> 32) ^ Double.doubleToLongBits(min)); + hash *= 1000003; + hash ^= hasMax ? 1231 : 1237; + hash *= 1000003; + hash ^= (int) ((Double.doubleToLongBits(max) >>> 32) ^ Double.doubleToLongBits(max)); + hash *= 1000003; + hash ^= positiveBuckets.hashCode(); + hash *= 1000003; + hash ^= negativeBuckets.hashCode(); + hash *= 1000003; + hash ^= exemplars.hashCode(); + return hash; + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableHistogramPointData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableHistogramPointData.java new file mode 100644 index 00000000000..3c6e65c799c --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableHistogramPointData.java @@ -0,0 +1,249 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import static io.opentelemetry.sdk.metrics.internal.data.HistogramPointDataValidations.validateFiniteBoundaries; +import static io.opentelemetry.sdk.metrics.internal.data.HistogramPointDataValidations.validateIsStrictlyIncreasing; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.metrics.data.HistogramPointData; +import java.util.Collections; +import java.util.List; + +/** + * A mutable {@link HistogramPointData} + * + *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *
<p>
    This class is not thread-safe. + */ +public final class MutableHistogramPointData implements HistogramPointData { + private long startEpochNanos; + private long epochNanos; + private Attributes attributes = Attributes.empty(); + private double sum; + private long count; + private boolean hasMin; + private double min; + private boolean hasMax; + private double max; + private List boundaries = Collections.emptyList(); + private final DynamicPrimitiveLongList counts; + private List exemplars = Collections.emptyList(); + + public MutableHistogramPointData(int buckets) { + this.counts = DynamicPrimitiveLongList.ofSubArrayCapacity(buckets); + this.counts.resizeAndClear(buckets); + } + + @SuppressWarnings({"TooManyParameters", "ForLoopReplaceableByForEach"}) + public MutableHistogramPointData set( + long startEpochNanos, + long epochNanos, + Attributes attributes, + double sum, + boolean hasMin, + double min, + boolean hasMax, + double max, + List boundaries, + long[] counts, + List exemplars) { + + if (this.counts.size() != boundaries.size() + 1) { + throw new IllegalArgumentException( + "invalid boundaries: size should be " + + (this.counts.size() - 1) + + " but was " + + boundaries.size()); + } + if (this.counts.size() != counts.length) { + throw new IllegalArgumentException( + "invalid counts: size should be " + this.counts.size() + " but was " + counts.length); + } + validateIsStrictlyIncreasing(boundaries); + validateFiniteBoundaries(boundaries); + + long totalCount = 0; + for (int i = 0; i < counts.length; i++) { + totalCount += counts[i]; + } + + this.startEpochNanos = startEpochNanos; + this.epochNanos = epochNanos; + this.attributes = attributes; + this.sum = sum; + this.count = totalCount; + this.hasMin = hasMin; + this.min = min; + this.hasMax = hasMax; + this.max = max; + this.boundaries = boundaries; + for (int i = 0; i < counts.length; i++) { + this.counts.setLong(i, counts[i]); + } + this.exemplars = exemplars; + + return this; + } + + @Override + public long getStartEpochNanos() { + return startEpochNanos; + } + + @Override + public long getEpochNanos() { + return epochNanos; + } + + @Override + public Attributes getAttributes() { + return attributes; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public long getCount() { + return count; + } + + @Override + public boolean hasMin() { + return hasMin; + } + + @Override + public double getMin() { + return min; + } + + @Override + public boolean hasMax() { + return hasMax; + } + + @Override + public double getMax() { + return max; + } + + @Override + public List getBoundaries() { + return boundaries; + } + + @Override + public List getCounts() { + return counts; + } + + @Override + public List getExemplars() { + return exemplars; + } + + @Override + public String toString() { + return "MutableHistogramPointData{" + + "startEpochNanos=" + + startEpochNanos + + ", " + + "epochNanos=" + + epochNanos + + ", " + + "attributes=" + + attributes + + ", " + + "sum=" + + sum + + ", " + + "count=" + + count + + ", " + + "hasMin=" + + hasMin + + ", " + + "min=" + + min + + ", " + + "hasMax=" + + hasMax + + ", " + + "max=" + + max + + ", " + + "boundaries=" + + boundaries + + ", " + + "counts=" + + counts + + ", " + + "exemplars=" + + exemplars + + "}"; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof HistogramPointData) { + HistogramPointData that = (HistogramPointData) o; + return this.startEpochNanos == that.getStartEpochNanos() + && 
this.epochNanos == that.getEpochNanos() + && this.attributes.equals(that.getAttributes()) + && Double.doubleToLongBits(this.sum) == Double.doubleToLongBits(that.getSum()) + && this.count == that.getCount() + && this.hasMin == that.hasMin() + && Double.doubleToLongBits(this.min) == Double.doubleToLongBits(that.getMin()) + && this.hasMax == that.hasMax() + && Double.doubleToLongBits(this.max) == Double.doubleToLongBits(that.getMax()) + && this.boundaries.equals(that.getBoundaries()) + && this.counts.equals(that.getCounts()) + && this.exemplars.equals(that.getExemplars()); + } + return false; + } + + @Override + public int hashCode() { + int hashcode = 1; + hashcode *= 1000003; + hashcode ^= (int) ((startEpochNanos >>> 32) ^ startEpochNanos); + hashcode *= 1000003; + hashcode ^= (int) ((epochNanos >>> 32) ^ epochNanos); + hashcode *= 1000003; + hashcode ^= attributes.hashCode(); + hashcode *= 1000003; + hashcode ^= (int) ((Double.doubleToLongBits(sum) >>> 32) ^ Double.doubleToLongBits(sum)); + hashcode *= 1000003; + hashcode ^= (int) ((count >>> 32) ^ count); + hashcode *= 1000003; + hashcode ^= hasMin ? 1231 : 1237; + hashcode *= 1000003; + hashcode ^= (int) ((Double.doubleToLongBits(min) >>> 32) ^ Double.doubleToLongBits(min)); + hashcode *= 1000003; + hashcode ^= hasMax ? 1231 : 1237; + hashcode *= 1000003; + hashcode ^= (int) ((Double.doubleToLongBits(max) >>> 32) ^ Double.doubleToLongBits(max)); + hashcode *= 1000003; + hashcode ^= boundaries.hashCode(); + hashcode *= 1000003; + hashcode ^= counts.hashCode(); + hashcode *= 1000003; + hashcode ^= exemplars.hashCode(); + return hashcode; + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableLongPointData.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableLongPointData.java new file mode 100644 index 00000000000..679a9e4902f --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/data/MutableLongPointData.java @@ -0,0 +1,136 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.metrics.data.LongExemplarData; +import io.opentelemetry.sdk.metrics.data.LongPointData; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Mutable {@link LongPointData} + * + *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *
<p>
    This class is not thread-safe. + */ +public class MutableLongPointData implements LongPointData { + + private long value; + private long startEpochNanos; + private long epochNanos; + private Attributes attributes = Attributes.empty(); + private List exemplars = Collections.emptyList(); + + @Override + public long getValue() { + return value; + } + + @Override + public long getStartEpochNanos() { + return startEpochNanos; + } + + @Override + public long getEpochNanos() { + return epochNanos; + } + + @Override + public Attributes getAttributes() { + return attributes; + } + + @Override + public List getExemplars() { + return exemplars; + } + + /** + * Sets all {@link MutableDoublePointData} based on {@code point}. + * + * @param point The point to set values upon + */ + public void set(LongPointData point) { + set( + point.getStartEpochNanos(), + point.getEpochNanos(), + point.getAttributes(), + point.getValue(), + point.getExemplars()); + } + + /** Sets all {@link MutableDoublePointData} values besides exemplars which are set to be empty. */ + public void set(long startEpochNanos, long epochNanos, Attributes attributes, long value) { + set(startEpochNanos, epochNanos, attributes, value, Collections.emptyList()); + } + + /** Sets all {@link MutableDoublePointData} values. */ + public void set( + long startEpochNanos, + long epochNanos, + Attributes attributes, + long value, + List exemplars) { + this.startEpochNanos = startEpochNanos; + this.epochNanos = epochNanos; + this.attributes = attributes; + this.value = value; + this.exemplars = exemplars; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof LongPointData)) { + return false; + } + LongPointData that = (LongPointData) o; + return value == that.getValue() + && startEpochNanos == that.getStartEpochNanos() + && epochNanos == that.getEpochNanos() + && Objects.equals(attributes, that.getAttributes()) + && Objects.equals(exemplars, that.getExemplars()); + } + + @Override + public int hashCode() { + int hashcode = 1; + hashcode *= 1000003; + hashcode ^= (int) ((startEpochNanos >>> 32) ^ startEpochNanos); + hashcode *= 1000003; + hashcode ^= (int) ((epochNanos >>> 32) ^ epochNanos); + hashcode *= 1000003; + hashcode ^= attributes.hashCode(); + hashcode *= 1000003; + hashcode ^= (int) ((value >>> 32) ^ value); + hashcode *= 1000003; + hashcode ^= exemplars.hashCode(); + return hashcode; + } + + @Override + public String toString() { + return "MutableLongPointData{" + + "value=" + + value + + ", startEpochNanos=" + + startEpochNanos + + ", epochNanos=" + + epochNanos + + ", attributes=" + + attributes + + ", exemplars=" + + exemplars + + '}'; + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/debug/DebugConfig.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/debug/DebugConfig.java index db5b82081d4..7228aa2be4e 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/debug/DebugConfig.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/debug/DebugConfig.java @@ -15,6 +15,8 @@ */ public final class DebugConfig { private static final String ENABLE_METRICS_DEBUG_PROPERTY = "otel.experimental.sdk.metrics.debug"; + + @SuppressWarnings("NonFinalStaticField") private static boolean enabled; private DebugConfig() {} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/debug/StackTraceSourceInfo.java 
b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/debug/StackTraceSourceInfo.java index a8b43be5fd2..64a18ad6736 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/debug/StackTraceSourceInfo.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/debug/StackTraceSourceInfo.java @@ -5,6 +5,8 @@ package io.opentelemetry.sdk.metrics.internal.debug; +import java.util.Locale; + /** Diagnostic information derived from stack traces. */ final class StackTraceSourceInfo implements SourceInfo { @@ -19,7 +21,7 @@ public String shortDebugString() { if (stackTraceElements.length > 0) { for (StackTraceElement e : stackTraceElements) { if (isInterestingStackTrace(e)) { - return String.format("%s:%d", e.getFileName(), e.getLineNumber()); + return String.format(Locale.ROOT, "%s:%d", e.getFileName(), e.getLineNumber()); } } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/descriptor/Advice.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/descriptor/Advice.java index 96302ecfbe4..768b3afc297 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/descriptor/Advice.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/descriptor/Advice.java @@ -13,6 +13,10 @@ import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; +/** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. + */ @AutoValue @Immutable public abstract class Advice { @@ -39,6 +43,10 @@ public boolean hasAttributes() { return getAttributes() != null; } + /** + * This class is internal and is hence not for public use. Its APIs are unstable and can change at + * any time. + */ @AutoValue.Builder public abstract static class AdviceBuilder { diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/ExemplarReservoir.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/ExemplarReservoir.java index cebd4683fbb..c0de6e0df6a 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/ExemplarReservoir.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/ExemplarReservoir.java @@ -8,6 +8,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.Clock; +import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.LongExemplarData; @@ -25,6 +26,19 @@ */ public interface ExemplarReservoir { + /** + * Wraps an {@link ExemplarReservoir}, casting calls from {@link + * ExemplarReservoir#offerLongMeasurement(long, Attributes, Context)} to {@link + * ExemplarReservoir#offerDoubleMeasurement(double, Attributes, Context)} such that {@link + * ExemplarReservoir#collectAndReset(Attributes)} only returns {@link DoubleExemplarData}. + * + *
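As a hedged illustration of what this wrapper buys callers (not part of the diff; the no-op delegate below is purely for demonstration, and the generic parameters assume the double exemplar type stripped from the signatures above):

import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import java.util.Collections;
import java.util.List;

class LongToDoubleSketch {
  static void demo() {
    // A trivial reservoir that only understands double measurements.
    ExemplarReservoir<DoubleExemplarData> doubleOnly =
        new ExemplarReservoir<DoubleExemplarData>() {
          @Override
          public void offerDoubleMeasurement(double value, Attributes attributes, Context context) {}

          @Override
          public void offerLongMeasurement(long value, Attributes attributes, Context context) {}

          @Override
          public List<DoubleExemplarData> collectAndReset(Attributes pointAttributes) {
            return Collections.emptyList();
          }
        };
    // Long measurements offered through the wrapper are forwarded as doubles.
    ExemplarReservoir<DoubleExemplarData> adapted = ExemplarReservoir.longToDouble(doubleOnly);
    adapted.offerLongMeasurement(5L, Attributes.empty(), Context.root()); // recorded as 5.0
  }
}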

    This is used for {@link Aggregation#explicitBucketHistogram()} and {@link + * Aggregation#base2ExponentialBucketHistogram()} which only support double measurements. + */ + static ExemplarReservoir longToDouble(ExemplarReservoir delegate) { + return new LongToDoubleExemplarReservoir<>(delegate); + } + /** Wraps a {@link ExemplarReservoir} with a measurement pre-filter. */ static ExemplarReservoir filtered( ExemplarFilter filter, ExemplarReservoir original) { diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/FixedSizeExemplarReservoir.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/FixedSizeExemplarReservoir.java index 05dfdd1afd3..20254de5991 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/FixedSizeExemplarReservoir.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/FixedSizeExemplarReservoir.java @@ -13,13 +13,16 @@ import java.util.Collections; import java.util.List; import java.util.function.BiFunction; +import javax.annotation.Nullable; /** Base for fixed-size reservoir sampling of Exemplars. */ abstract class FixedSizeExemplarReservoir implements ExemplarReservoir { - private final ReservoirCell[] storage; + @Nullable private ReservoirCell[] storage; private final ReservoirCellSelector reservoirCellSelector; private final BiFunction mapAndResetCell; + private final int size; + private final Clock clock; private volatile boolean hasMeasurements = false; /** Instantiates an exemplar reservoir of fixed size. */ @@ -28,16 +31,18 @@ abstract class FixedSizeExemplarReservoir implements Exe int size, ReservoirCellSelector reservoirCellSelector, BiFunction mapAndResetCell) { - this.storage = new ReservoirCell[size]; - for (int i = 0; i < size; ++i) { - this.storage[i] = new ReservoirCell(clock); - } + this.storage = null; // lazily initialize to avoid allocations + this.size = size; + this.clock = clock; this.reservoirCellSelector = reservoirCellSelector; this.mapAndResetCell = mapAndResetCell; } @Override public void offerLongMeasurement(long value, Attributes attributes, Context context) { + if (storage == null) { + storage = initStorage(); + } int bucket = reservoirCellSelector.reservoirCellIndexFor(storage, value, attributes, context); if (bucket != -1) { this.storage[bucket].recordLongMeasurement(value, attributes, context); @@ -47,6 +52,9 @@ public void offerLongMeasurement(long value, Attributes attributes, Context cont @Override public void offerDoubleMeasurement(double value, Attributes attributes, Context context) { + if (storage == null) { + storage = initStorage(); + } int bucket = reservoirCellSelector.reservoirCellIndexFor(storage, value, attributes, context); if (bucket != -1) { this.storage[bucket].recordDoubleMeasurement(value, attributes, context); @@ -54,9 +62,17 @@ public void offerDoubleMeasurement(double value, Attributes attributes, Context } } + private ReservoirCell[] initStorage() { + ReservoirCell[] storage = new ReservoirCell[this.size]; + for (int i = 0; i < size; ++i) { + storage[i] = new ReservoirCell(this.clock); + } + return storage; + } + @Override public List collectAndReset(Attributes pointAttributes) { - if (!hasMeasurements) { + if (!hasMeasurements || storage == null) { return Collections.emptyList(); } // Note: we are collecting exemplars from buckets piecemeal, but we diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongToDoubleExemplarReservoir.java 
b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongToDoubleExemplarReservoir.java new file mode 100644 index 00000000000..c3a6b98fced --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongToDoubleExemplarReservoir.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.exemplar; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.metrics.data.ExemplarData; +import java.util.List; + +class LongToDoubleExemplarReservoir implements ExemplarReservoir { + + private final ExemplarReservoir delegate; + + LongToDoubleExemplarReservoir(ExemplarReservoir delegate) { + this.delegate = delegate; + } + + @Override + public void offerDoubleMeasurement(double value, Attributes attributes, Context context) { + delegate.offerDoubleMeasurement(value, attributes, context); + } + + @Override + public void offerLongMeasurement(long value, Attributes attributes, Context context) { + offerDoubleMeasurement((double) value, attributes, context); + } + + @Override + public List collectAndReset(Attributes pointAttributes) { + return delegate.collectAndReset(pointAttributes); + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/ReservoirCell.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/ReservoirCell.java index 1adf684531a..4fd63edcd80 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/ReservoirCell.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/exemplar/ReservoirCell.java @@ -68,8 +68,8 @@ synchronized void recordDoubleMeasurement(double value, Attributes attributes, C private void offerMeasurement(Attributes attributes, Context context) { this.attributes = attributes; - // Note: It may make sense in the future to attempt to pull this from an active span. - this.recordTime = clock.now(); + // High precision time is not worth the additional performance expense it incurs for exemplars + this.recordTime = clock.now(/* highPrecision= */ false); Span current = Span.fromContext(context); if (current.getSpanContext().isValid()) { this.spanContext = current.getSpanContext(); diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/MetricProducer.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/MetricProducer.java deleted file mode 100644 index 32e036b9d10..00000000000 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/MetricProducer.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics.internal.export; - -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.export.CollectionRegistration; -import io.opentelemetry.sdk.metrics.export.MetricReader; -import java.util.Collection; -import java.util.Collections; -import javax.annotation.concurrent.ThreadSafe; - -/** - * {@code MetricProducer} is the interface that is used to make metric data available to the {@link - * MetricReader}s. Implementations should be stateful, in that each call to {@link - * #collectAllMetrics()} will return any metric generated since the last call was made. - * - *

    Implementations must be thread-safe. - * - *

    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. - */ -@ThreadSafe -public interface MetricProducer extends CollectionRegistration { - - /** Cast the registration to a {@link MetricProducer}. */ - static MetricProducer asMetricProducer(CollectionRegistration registration) { - if (!(registration instanceof MetricProducer)) { - throw new IllegalArgumentException( - "unrecognized CollectionRegistration, custom MetricReader implementations are not currently supported"); - } - return (MetricProducer) registration; - } - - /** Return a noop {@link MetricProducer}. */ - static MetricProducer noop() { - return Collections::emptyList; - } - - /** - * Returns a collection of produced {@link MetricData}s to be exported. This will only be those - * metrics that have been produced since the last time this method was called. - * - * @return a collection of produced {@link MetricData}s to be exported. - */ - Collection collectAllMetrics(); -} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/RegisteredReader.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/RegisteredReader.java index fbb587741b0..c56d01b9cda 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/RegisteredReader.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/export/RegisteredReader.java @@ -8,6 +8,7 @@ import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.PointData; +import io.opentelemetry.sdk.metrics.export.MetricProducer; import io.opentelemetry.sdk.metrics.export.MetricReader; import io.opentelemetry.sdk.metrics.internal.view.ViewRegistry; import java.util.concurrent.atomic.AtomicInteger; diff --git a/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/package-info.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/package-info.java similarity index 65% rename from exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/package-info.java rename to sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/package-info.java index 1173e8d8452..13b7ee33253 100644 --- a/exporters/jaeger-thrift/src/main/java/io/opentelemetry/exporter/jaeger/thrift/package-info.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/package-info.java @@ -3,7 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ +/** Internal SDK implementation classes. */ @ParametersAreNonnullByDefault -package io.opentelemetry.exporter.jaeger.thrift; +package io.opentelemetry.sdk.metrics.internal; import javax.annotation.ParametersAreNonnullByDefault; diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ArrayBasedStack.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ArrayBasedStack.java new file mode 100644 index 00000000000..907ec7e2234 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ArrayBasedStack.java @@ -0,0 +1,80 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import javax.annotation.Nullable; + +/** + * Array-based Stack. + * + *

    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *
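For context, a brief usage sketch of the push/pop API defined below (illustrative only; it assumes the stack keeps its generic element type, which the extraction above has dropped):

class ArrayBasedStackSketch {
  static void demo() {
    ArrayBasedStack<String> stack = new ArrayBasedStack<>();
    stack.push("first");
    stack.push("second");
    String top = stack.pop();    // "second" (last in, first out)
    String bottom = stack.pop(); // "first"
    String none = stack.pop();   // null: popping an empty stack returns null rather than throwing
  }
}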

    This class is not thread-safe. + */ +public final class ArrayBasedStack { + // Visible for test + static final int DEFAULT_CAPACITY = 10; + + // NOTE (asafm): Using native array instead of ArrayList since I plan to add eviction + // if the initial portion of the stack is not used for several cycles of collection + private T[] array; + + private int size; + + @SuppressWarnings("unchecked") + public ArrayBasedStack() { + array = (T[]) new Object[DEFAULT_CAPACITY]; + size = 0; + } + + /** + * Add {@code element} to the top of the stack (LIFO). + * + * @param element The element to add + * @throws NullPointerException if {@code element} is null + */ + public void push(T element) { + if (element == null) { + throw new NullPointerException("Null is not permitted as element in the stack"); + } + if (size == array.length) { + resizeArray(array.length * 2); + } + array[size++] = element; + } + + /** + * Removes and returns an element from the top of the stack (LIFO). + * + * @return the top most element in the stack (last one added) + */ + @Nullable + public T pop() { + if (isEmpty()) { + return null; + } + T element = array[size - 1]; + array[size - 1] = null; + size--; + return element; + } + + public boolean isEmpty() { + return size == 0; + } + + public int size() { + return size; + } + + @SuppressWarnings("unchecked") + private void resizeArray(int newCapacity) { + T[] newArray = (T[]) new Object[newCapacity]; + System.arraycopy(array, 0, newArray, 0, size); + array = newArray; + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorage.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorage.java index 2e088541d10..42110e2cb1e 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorage.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorage.java @@ -5,11 +5,14 @@ package io.opentelemetry.sdk.metrics.internal.state; +import static io.opentelemetry.sdk.common.export.MemoryMode.REUSABLE_DATA; + import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; import io.opentelemetry.api.metrics.ObservableLongMeasurement; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.internal.ThrottlingLogger; import io.opentelemetry.sdk.metrics.View; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; @@ -25,6 +28,8 @@ import io.opentelemetry.sdk.metrics.internal.view.AttributesProcessor; import io.opentelemetry.sdk.metrics.internal.view.RegisteredView; import io.opentelemetry.sdk.resources.Resource; +import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.logging.Level; @@ -36,7 +41,7 @@ *

    This class is internal and is hence not for public use. Its APIs are unstable and can change * at any time. */ -final class AsynchronousMetricStorage +public final class AsynchronousMetricStorage implements MetricStorage { private static final Logger logger = Logger.getLogger(AsynchronousMetricStorage.class.getName()); @@ -53,9 +58,18 @@ final class AsynchronousMetricStorage points = new HashMap<>(); - private Map lastPoints = - new HashMap<>(); // Only populated if aggregationTemporality == DELTA + private Map points; + + // Only populated if aggregationTemporality == DELTA + private Map lastPoints; + + // Only populated if memoryMode == REUSABLE_DATA + private final ObjectPool reusablePointsPool; + + // Only populated if memoryMode == REUSABLE_DATA + private final ArrayList reusableResultList = new ArrayList<>(); + + private final MemoryMode memoryMode; private AsynchronousMetricStorage( RegisteredReader registeredReader, @@ -69,25 +83,38 @@ private AsynchronousMetricStorage( registeredReader .getReader() .getAggregationTemporality(metricDescriptor.getSourceInstrument().getType()); + this.memoryMode = registeredReader.getReader().getMemoryMode(); this.aggregator = aggregator; this.attributesProcessor = attributesProcessor; this.maxCardinality = maxCardinality - 1; + this.reusablePointsPool = new ObjectPool<>(aggregator::createReusablePoint); + if (memoryMode == REUSABLE_DATA) { + lastPoints = new PooledHashMap<>(); + points = new PooledHashMap<>(); + } else { + lastPoints = new HashMap<>(); + points = new HashMap<>(); + } } /** * Create an asynchronous storage instance for the {@link View} and {@link InstrumentDescriptor}. */ // TODO(anuraaga): The cast to generic type here looks suspicious. - static AsynchronousMetricStorage create( - RegisteredReader registeredReader, - RegisteredView registeredView, - InstrumentDescriptor instrumentDescriptor) { + public static + AsynchronousMetricStorage create( + RegisteredReader registeredReader, + RegisteredView registeredView, + InstrumentDescriptor instrumentDescriptor) { View view = registeredView.getView(); MetricDescriptor metricDescriptor = MetricDescriptor.create(view, registeredView.getViewSourceInfo(), instrumentDescriptor); Aggregator aggregator = ((AggregatorFactory) view.getAggregation()) - .createAggregator(instrumentDescriptor, ExemplarFilter.alwaysOff()); + .createAggregator( + instrumentDescriptor, + ExemplarFilter.alwaysOff(), + registeredReader.getReader().getMemoryMode()); return new AsynchronousMetricStorage<>( registeredReader, metricDescriptor, @@ -107,12 +134,9 @@ void record(Measurement measurement) { aggregationTemporality == AggregationTemporality.DELTA ? registeredReader.getLastCollectEpochNanos() : measurement.startEpochNanos(); - measurement = - measurement.hasDoubleValue() - ? Measurement.doubleMeasurement( - start, measurement.epochNanos(), measurement.doubleValue(), processedAttributes) - : Measurement.longMeasurement( - start, measurement.epochNanos(), measurement.longValue(), processedAttributes); + + measurement = measurement.withAttributes(processedAttributes).withStartEpochNanos(start); + recordPoint(processedAttributes, measurement); } @@ -126,18 +150,7 @@ private void recordPoint(Attributes attributes, Measurement measurement) { + maxCardinality + ")."); attributes = MetricStorage.CARDINALITY_OVERFLOW; - measurement = - measurement.hasDoubleValue() - ? 
Measurement.doubleMeasurement( - measurement.startEpochNanos(), - measurement.epochNanos(), - measurement.doubleValue(), - attributes) - : Measurement.longMeasurement( - measurement.startEpochNanos(), - measurement.epochNanos(), - measurement.longValue(), - attributes); + measurement = measurement.withAttributes(attributes); } else if (points.containsKey( attributes)) { // Check there is not already a recording for the attributes throttlingLogger.log( @@ -149,7 +162,15 @@ private void recordPoint(Attributes attributes, Measurement measurement) { return; } - points.put(attributes, aggregator.toPoint(measurement)); + T dataPoint; + if (memoryMode == REUSABLE_DATA) { + dataPoint = reusablePointsPool.borrowObject(); + aggregator.toPoint(measurement, dataPoint); + } else { + dataPoint = aggregator.toPoint(measurement); + } + + points.put(attributes, dataPoint); } @Override @@ -168,25 +189,76 @@ public MetricData collect( InstrumentationScopeInfo instrumentationScopeInfo, long startEpochNanos, long epochNanos) { - Map result; + if (memoryMode == REUSABLE_DATA) { + // Collect can not run concurrently for same reader, hence we safely assume + // the previous collect result has been used and done with + reusableResultList.forEach(reusablePointsPool::returnObject); + reusableResultList.clear(); + } + + Collection result; if (aggregationTemporality == AggregationTemporality.DELTA) { Map points = this.points; Map lastPoints = this.lastPoints; - lastPoints.entrySet().removeIf(entry -> !points.containsKey(entry.getKey())); + + Collection deltaPoints; + if (memoryMode == REUSABLE_DATA) { + deltaPoints = reusableResultList; + } else { + deltaPoints = new ArrayList<>(); + } + points.forEach( - (k, v) -> lastPoints.compute(k, (k2, v2) -> v2 == null ? v : aggregator.diff(v2, v))); - result = lastPoints; + (k, v) -> { + T lastPoint = lastPoints.get(k); + + T deltaPoint; + if (lastPoint == null) { + if (memoryMode == REUSABLE_DATA) { + deltaPoint = reusablePointsPool.borrowObject(); + aggregator.copyPoint(v, deltaPoint); + } else { + deltaPoint = v; + } + } else { + if (memoryMode == REUSABLE_DATA) { + aggregator.diffInPlace(lastPoint, v); + deltaPoint = lastPoint; + + // Remaining last points are returned to reusablePointsPool, but + // this reusable point is still used, so don't return it to pool yet + lastPoints.remove(k); + } else { + deltaPoint = aggregator.diff(lastPoint, v); + } + } + + deltaPoints.add(deltaPoint); + }); + + if (memoryMode == REUSABLE_DATA) { + lastPoints.forEach((k, v) -> reusablePointsPool.returnObject(v)); + lastPoints.clear(); + this.points = lastPoints; + } else { + this.points = new HashMap<>(); + } + this.lastPoints = points; - } else { - result = points; + result = deltaPoints; + } else /* CUMULATIVE */ { + if (memoryMode == REUSABLE_DATA) { + points.forEach((k, v) -> reusableResultList.add(v)); + points.clear(); + result = reusableResultList; + } else { + result = points.values(); + points = new HashMap<>(); + } } - this.points = new HashMap<>(); + return aggregator.toMetricData( - resource, - instrumentationScopeInfo, - metricDescriptor, - result.values(), - aggregationTemporality); + resource, instrumentationScopeInfo, metricDescriptor, result, aggregationTemporality); } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/CallbackRegistration.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/CallbackRegistration.java index 6634dd728ac..fec792096a2 100644 --- 
a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/CallbackRegistration.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/CallbackRegistration.java @@ -70,7 +70,7 @@ public String toString() { return "CallbackRegistration{instrumentDescriptors=" + instrumentDescriptors + "}"; } - void invokeCallback(RegisteredReader reader, long startEpochNanos, long epochNanos) { + public void invokeCallback(RegisteredReader reader, long startEpochNanos, long epochNanos) { // Return early if no storages are registered if (!hasStorages) { return; diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/DefaultSynchronousMetricStorage.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/DefaultSynchronousMetricStorage.java index 44c5050dbd4..c0deda9d068 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/DefaultSynchronousMetricStorage.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/DefaultSynchronousMetricStorage.java @@ -5,9 +5,14 @@ package io.opentelemetry.sdk.metrics.internal.state; +import static io.opentelemetry.sdk.common.export.MemoryMode.IMMUTABLE_DATA; +import static io.opentelemetry.sdk.common.export.MemoryMode.REUSABLE_DATA; +import static io.opentelemetry.sdk.metrics.data.AggregationTemporality.DELTA; + import io.opentelemetry.api.common.Attributes; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.internal.ThrottlingLogger; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.ExemplarData; @@ -26,6 +31,7 @@ import java.util.Queue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Level; import java.util.logging.Logger; @@ -46,10 +52,19 @@ public final class DefaultSynchronousMetricStorage aggregator; - private final ConcurrentHashMap> aggregatorHandles = - new ConcurrentHashMap<>(); + private volatile AggregatorHolder aggregatorHolder = new AggregatorHolder<>(); private final AttributesProcessor attributesProcessor; + private final MemoryMode memoryMode; + + // Only populated if memoryMode == REUSABLE_DATA + private final ArrayList reusableResultList = new ArrayList<>(); + + // Only populated if memoryMode == REUSABLE_DATA and + // aggregationTemporality is DELTA + private volatile ConcurrentHashMap> + previousCollectionAggregatorHandles = new ConcurrentHashMap<>(); + /** * This field is set to 1 less than the actual intended cardinality limit, allowing the last slot * to be filled by the {@link MetricStorage#CARDINALITY_OVERFLOW} series. 
@@ -74,6 +89,7 @@ public final class DefaultSynchronousMetricStorage> getAggregatorHandlePool() { @Override public void recordLong(long value, Attributes attributes, Context context) { - AggregatorHandle handle = getAggregatorHandle(attributes, context); - handle.recordLong(value, attributes, context); + AggregatorHolder aggregatorHolder = getHolderForRecord(); + try { + AggregatorHandle handle = + getAggregatorHandle(aggregatorHolder.aggregatorHandles, attributes, context); + handle.recordLong(value, attributes, context); + } finally { + releaseHolderForRecord(aggregatorHolder); + } } @Override public void recordDouble(double value, Attributes attributes, Context context) { - AggregatorHandle handle = getAggregatorHandle(attributes, context); - handle.recordDouble(value, attributes, context); + if (Double.isNaN(value)) { + logger.log( + Level.FINE, + "Instrument " + + metricDescriptor.getSourceInstrument().getName() + + " has recorded measurement Not-a-Number (NaN) value with attributes " + + attributes + + ". Dropping measurement."); + return; + } + AggregatorHolder aggregatorHolder = getHolderForRecord(); + try { + AggregatorHandle handle = + getAggregatorHandle(aggregatorHolder.aggregatorHandles, attributes, context); + handle.recordDouble(value, attributes, context); + } finally { + releaseHolderForRecord(aggregatorHolder); + } + } + + @Override + public boolean isEnabled() { + return true; + } + + /** + * Obtain the AggregatorHolder for recording measurements, re-reading the volatile + * this.aggregatorHolder until we access one where recordsInProgress is even. Collect sets + * recordsInProgress to odd as a signal that AggregatorHolder is stale and is being replaced. + * Record operations increment recordInProgress by 2. Callers MUST call {@link + * #releaseHolderForRecord(AggregatorHolder)} when record operation completes to signal to that + * its safe to proceed with Collect operations. + */ + private AggregatorHolder getHolderForRecord() { + do { + AggregatorHolder aggregatorHolder = this.aggregatorHolder; + int recordsInProgress = aggregatorHolder.activeRecordingThreads.addAndGet(2); + if (recordsInProgress % 2 == 0) { + return aggregatorHolder; + } else { + // Collect is in progress, decrement recordsInProgress to allow collect to proceed and + // re-read aggregatorHolder + aggregatorHolder.activeRecordingThreads.addAndGet(-2); + } + } while (true); + } + + /** + * Called on the {@link AggregatorHolder} obtained from {@link #getHolderForRecord()} to indicate + * that recording is complete, and it is safe to collect. + */ + private void releaseHolderForRecord(AggregatorHolder aggregatorHolder) { + aggregatorHolder.activeRecordingThreads.addAndGet(-2); } - private AggregatorHandle getAggregatorHandle(Attributes attributes, Context context) { + private AggregatorHandle getAggregatorHandle( + ConcurrentHashMap> aggregatorHandles, + Attributes attributes, + Context context) { Objects.requireNonNull(attributes, "attributes"); attributes = attributesProcessor.process(attributes, context); AggregatorHandle handle = aggregatorHandles.get(attributes); @@ -130,22 +206,83 @@ public MetricData collect( InstrumentationScopeInfo instrumentationScopeInfo, long startEpochNanos, long epochNanos) { - boolean reset = aggregationTemporality == AggregationTemporality.DELTA; + boolean reset = aggregationTemporality == DELTA; long start = - aggregationTemporality == AggregationTemporality.DELTA + aggregationTemporality == DELTA ? 
registeredReader.getLastCollectEpochNanos() : startEpochNanos; + ConcurrentHashMap> aggregatorHandles; + if (reset) { + AggregatorHolder holder = this.aggregatorHolder; + this.aggregatorHolder = + (memoryMode == REUSABLE_DATA) + ? new AggregatorHolder<>(previousCollectionAggregatorHandles) + : new AggregatorHolder<>(); + + // Increment recordsInProgress by 1, which produces an odd number acting as a signal that + // record operations should re-read the volatile this.aggregatorHolder. + // Repeatedly grab recordsInProgress until it is <= 1, which signals all active record + // operations are complete. + int recordsInProgress = holder.activeRecordingThreads.addAndGet(1); + while (recordsInProgress > 1) { + recordsInProgress = holder.activeRecordingThreads.get(); + } + aggregatorHandles = holder.aggregatorHandles; + } else { + aggregatorHandles = this.aggregatorHolder.aggregatorHandles; + } + + List points; + if (memoryMode == REUSABLE_DATA) { + reusableResultList.clear(); + points = reusableResultList; + } else { + points = new ArrayList<>(aggregatorHandles.size()); + } + + // In DELTA aggregation temporality each Attributes is reset to 0 + // every time we perform a collection (by definition of DELTA). + // In IMMUTABLE_DATA MemoryMode, this is accomplished by removing all aggregator handles + // (into which the values are recorded) effectively starting from 0 + // for each recorded Attributes. + // In REUSABLE_DATA MemoryMode, we strive for zero allocations. Since even removing + // a key-value from a map and putting it again on next recording will cost an allocation, + // we are keeping the aggregator handles in their map, and only reset their value once + // we finish collecting the aggregated value from each one. + // The SDK must adhere to keeping no more than maxCardinality unique Attributes in memory, + // hence during collect(), when the map is at full capacity, we try to clear away unused + // aggregator handles, so on next recording cycle using this map, there will be room for newly + // recorded Attributes. This comes at the expanse of memory allocations. This can be avoided + // if the user chooses to increase the maxCardinality. + if (memoryMode == REUSABLE_DATA && reset) { + if (aggregatorHandles.size() >= maxCardinality) { + aggregatorHandles.forEach( + (attribute, handle) -> { + if (!handle.hasRecordedValues()) { + aggregatorHandles.remove(attribute); + } + }); + } + } + // Grab aggregated points. - List points = new ArrayList<>(aggregatorHandles.size()); aggregatorHandles.forEach( (attributes, handle) -> { + if (!handle.hasRecordedValues()) { + return; + } T point = handle.aggregateThenMaybeReset(start, epochNanos, attributes, reset); - if (reset) { - aggregatorHandles.remove(attributes, handle); + + if (reset && memoryMode == IMMUTABLE_DATA) { // Return the aggregator to the pool. 
+ // The pool is only used in DELTA temporality (since in CUMULATIVE the handler is + // always used as it is the place accumulating the values and never resets) + // AND only in IMMUTABLE_DATA memory mode since in REUSABLE_DATA we avoid + // using the pool since it allocates memory internally on each put() or remove() aggregatorHandlePool.offer(handle); } + if (point != null) { points.add(point); } @@ -158,6 +295,10 @@ public MetricData collect( aggregatorHandlePool.poll(); } + if (reset && memoryMode == REUSABLE_DATA) { + previousCollectionAggregatorHandles = aggregatorHandles; + } + if (points.isEmpty()) { return EmptyMetricData.getInstance(); } @@ -170,4 +311,32 @@ public MetricData collect( public MetricDescriptor getMetricDescriptor() { return metricDescriptor; } + + private static class AggregatorHolder { + private final ConcurrentHashMap> aggregatorHandles; + // Recording threads grab the current interval (AggregatorHolder) and atomically increment + // this by 2 before recording against it (and then decrement by two when done). + // + // The collection thread grabs the current interval (AggregatorHolder) and atomically + // increments this by 1 to "lock" this interval (and then waits for any active recording + // threads to complete before collecting it). + // + // Recording threads check the return value of their atomic increment, and if it's odd + // that means the collector thread has "locked" this interval for collection. + // + // But before the collector "locks" the interval it sets up a new current interval + // (AggregatorHolder), and so if a recording thread encounters an odd value, + // all it needs to do is release the "read lock" it just obtained (decrementing by 2), + // and then grab and record against the new current interval (AggregatorHolder). 
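To make the even/odd handshake described above easier to follow, here is a self-contained sketch of the same idea outside the SDK types (names are illustrative and not part of the diff):

import java.util.concurrent.atomic.AtomicInteger;

class IntervalGuardSketch {
  private final AtomicInteger activeRecordingThreads = new AtomicInteger(0);

  /** Recording side: returns true if the caller may record; it must later call releaseAfterRecord(). */
  boolean tryAcquireForRecord() {
    int inProgress = activeRecordingThreads.addAndGet(2);
    if (inProgress % 2 == 0) {
      return true; // even: no collection has locked this interval
    }
    // Odd: the collector has locked this interval; back out and retry against the new interval.
    activeRecordingThreads.addAndGet(-2);
    return false;
  }

  void releaseAfterRecord() {
    activeRecordingThreads.addAndGet(-2);
  }

  /** Collector side: mark the interval stale (odd), then wait for in-flight recordings to drain. */
  void lockForCollect() {
    int inProgress = activeRecordingThreads.addAndGet(1);
    while (inProgress > 1) {
      inProgress = activeRecordingThreads.get();
    }
  }
}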
+ private final AtomicInteger activeRecordingThreads = new AtomicInteger(0); + + private AggregatorHolder() { + aggregatorHandles = new ConcurrentHashMap<>(); + } + + private AggregatorHolder( + ConcurrentHashMap> aggregatorHandles) { + this.aggregatorHandles = aggregatorHandles; + } + } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/EmptyMetricStorage.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/EmptyMetricStorage.java index d076178d193..faaa7087c76 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/EmptyMetricStorage.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/EmptyMetricStorage.java @@ -39,4 +39,9 @@ public void recordLong(long value, Attributes attributes, Context context) {} @Override public void recordDouble(double value, Attributes attributes, Context context) {} + + @Override + public boolean isEnabled() { + return false; + } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ImmutableMeasurement.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ImmutableMeasurement.java new file mode 100644 index 00000000000..9cac89e96e9 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ImmutableMeasurement.java @@ -0,0 +1,64 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; +import io.opentelemetry.api.metrics.ObservableLongMeasurement; + +/** + * A long or double measurement recorded from {@link ObservableLongMeasurement} or {@link + * ObservableDoubleMeasurement}. + * + *

    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +@AutoValue +public abstract class ImmutableMeasurement implements Measurement { + + static ImmutableMeasurement createDouble( + long startEpochNanos, long epochNanos, double value, Attributes attributes) { + return new AutoValue_ImmutableMeasurement( + startEpochNanos, + epochNanos, + /* hasLongValue= */ false, + 0L, + /* hasDoubleValue= */ true, + value, + attributes); + } + + static ImmutableMeasurement createLong( + long startEpochNanos, long epochNanos, long value, Attributes attributes) { + return new AutoValue_ImmutableMeasurement( + startEpochNanos, + epochNanos, + /* hasLongValue= */ true, + value, + /* hasDoubleValue= */ false, + 0.0, + attributes); + } + + @Override + public Measurement withAttributes(Attributes attributes) { + if (hasDoubleValue()) { + return createDouble(startEpochNanos(), epochNanos(), doubleValue(), attributes); + } else { + return createLong(startEpochNanos(), epochNanos(), longValue(), attributes); + } + } + + @Override + public Measurement withStartEpochNanos(long startEpochNanos) { + if (hasDoubleValue()) { + return createDouble(startEpochNanos, epochNanos(), doubleValue(), attributes()); + } else { + return createLong(startEpochNanos, epochNanos(), longValue(), attributes()); + } + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/Measurement.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/Measurement.java index 43370bb571f..a5023995dad 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/Measurement.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/Measurement.java @@ -5,7 +5,6 @@ package io.opentelemetry.sdk.metrics.internal.state; -import com.google.auto.value.AutoValue; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; import io.opentelemetry.api.metrics.ObservableLongMeasurement; @@ -13,45 +12,42 @@ /** * A long or double measurement recorded from {@link ObservableLongMeasurement} or {@link * ObservableDoubleMeasurement}. + * + *
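A small sketch (not part of the diff, and assuming same-package access to the package-private factory) of how the immutable flavor behaves when its attributes are rewritten:

import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;

class MeasurementSketch {
  static void demo() {
    Measurement original =
        ImmutableMeasurement.createLong(
            /* startEpochNanos= */ 0L, /* epochNanos= */ 100L, /* value= */ 5L, Attributes.empty());
    // For the immutable implementation, withAttributes() returns a new instance;
    // the mutable implementation introduced later in this diff updates itself instead.
    Measurement relabeled =
        original.withAttributes(Attributes.of(AttributeKey.stringKey("host"), "a"));
    // "original" is unchanged; "relabeled" carries the same timestamps and value.
  }
}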

    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. */ -@AutoValue -public abstract class Measurement { - - static Measurement doubleMeasurement( - long startEpochNanos, long epochNanos, double value, Attributes attributes) { - return new AutoValue_Measurement( - startEpochNanos, - epochNanos, - /* hasLongValue= */ false, - 0L, - /* hasDoubleValue= */ true, - value, - attributes); - } - - static Measurement longMeasurement( - long startEpochNanos, long epochNanos, long value, Attributes attributes) { - return new AutoValue_Measurement( - startEpochNanos, - epochNanos, - /* hasLongValue= */ true, - value, - /* hasDoubleValue= */ false, - 0.0, - attributes); - } - - public abstract long startEpochNanos(); - - public abstract long epochNanos(); - - public abstract boolean hasLongValue(); - - public abstract long longValue(); - - public abstract boolean hasDoubleValue(); - - public abstract double doubleValue(); - - public abstract Attributes attributes(); +public interface Measurement { + long startEpochNanos(); + + long epochNanos(); + + boolean hasLongValue(); + + long longValue(); + + boolean hasDoubleValue(); + + double doubleValue(); + + Attributes attributes(); + + /** + * Updates the attributes. + * + * @param attributes The attributes to update + * @return The updated object. For {@link ImmutableMeasurement} it will be a new object with the + * updated attributes and for {@link MutableMeasurement} it will return itself with the + * attributes updated + */ + Measurement withAttributes(Attributes attributes); + + /** + * Updates the startEpochNanos. + * + * @param startEpochNanos start epoch nanosecond + * @return The updated object. For {@link ImmutableMeasurement} it will be a new object with the + * updated startEpochNanos and for {@link MutableMeasurement} it will return itself with the + * startEpochNanos updated + */ + Measurement withStartEpochNanos(long startEpochNanos); } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MeterProviderSharedState.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MeterProviderSharedState.java index 9b185b36a84..aaf932d9202 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MeterProviderSharedState.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MeterProviderSharedState.java @@ -21,9 +21,12 @@ @AutoValue @Immutable public abstract class MeterProviderSharedState { + public static MeterProviderSharedState create( Clock clock, Resource resource, ExemplarFilter exemplarFilter, long startEpochNanos) { - return new AutoValue_MeterProviderSharedState(clock, resource, startEpochNanos, exemplarFilter); + MeterProviderSharedState sharedState = + new AutoValue_MeterProviderSharedState(clock, resource, startEpochNanos, exemplarFilter); + return sharedState; } MeterProviderSharedState() {} @@ -38,5 +41,5 @@ public static MeterProviderSharedState create( public abstract long getStartEpochNanos(); /** Returns the {@link ExemplarFilter} for remembering synchronous measurements. 
*/ - abstract ExemplarFilter getExemplarFilter(); + public abstract ExemplarFilter getExemplarFilter(); } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MeterSharedState.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MeterSharedState.java deleted file mode 100644 index 64ecefd5385..00000000000 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MeterSharedState.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics.internal.state; - -import static java.util.stream.Collectors.toMap; - -import io.opentelemetry.api.internal.GuardedBy; -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.metrics.Aggregation; -import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.export.RegisteredReader; -import io.opentelemetry.sdk.metrics.internal.view.RegisteredView; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.function.Function; - -/** - * State for a {@code Meter}. - * - *

    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. - */ -public class MeterSharedState { - - private final Object collectLock = new Object(); - private final Object callbackLock = new Object(); - - @GuardedBy("callbackLock") - private final List callbackRegistrations = new ArrayList<>(); - - private final Map readerStorageRegistries; - - private final InstrumentationScopeInfo instrumentationScopeInfo; - - private MeterSharedState( - InstrumentationScopeInfo instrumentationScopeInfo, List registeredReaders) { - this.instrumentationScopeInfo = instrumentationScopeInfo; - this.readerStorageRegistries = - registeredReaders.stream() - .collect(toMap(Function.identity(), unused -> new MetricStorageRegistry())); - } - - public static MeterSharedState create( - InstrumentationScopeInfo instrumentationScopeInfo, List registeredReaders) { - return new MeterSharedState(instrumentationScopeInfo, registeredReaders); - } - - /** - * Unregister the callback. - * - *

    Callbacks are originally registered via {@link #registerCallback(CallbackRegistration)}. - */ - public void removeCallback(CallbackRegistration callbackRegistration) { - synchronized (callbackLock) { - this.callbackRegistrations.remove(callbackRegistration); - } - } - - /** - * Register the callback. - * - *

    The callback will be invoked once per collection until unregistered via {@link - * #removeCallback(CallbackRegistration)}. - */ - public final void registerCallback(CallbackRegistration callbackRegistration) { - synchronized (callbackLock) { - callbackRegistrations.add(callbackRegistration); - } - } - - // only visible for testing. - /** Returns the {@link InstrumentationScopeInfo} for this {@code Meter}. */ - public InstrumentationScopeInfo getInstrumentationScopeInfo() { - return instrumentationScopeInfo; - } - - /** Collects all metrics. */ - public List collectAll( - RegisteredReader registeredReader, - MeterProviderSharedState meterProviderSharedState, - long epochNanos) { - List currentRegisteredCallbacks; - synchronized (callbackLock) { - currentRegisteredCallbacks = new ArrayList<>(callbackRegistrations); - } - // Collections across all readers are sequential - synchronized (collectLock) { - for (CallbackRegistration callbackRegistration : currentRegisteredCallbacks) { - callbackRegistration.invokeCallback( - registeredReader, meterProviderSharedState.getStartEpochNanos(), epochNanos); - } - - Collection storages = - Objects.requireNonNull(readerStorageRegistries.get(registeredReader)).getStorages(); - List result = new ArrayList<>(storages.size()); - for (MetricStorage storage : storages) { - MetricData current = - storage.collect( - meterProviderSharedState.getResource(), - getInstrumentationScopeInfo(), - meterProviderSharedState.getStartEpochNanos(), - epochNanos); - // Ignore if the metric data doesn't have any data points, for example when aggregation is - // Aggregation#drop() - if (!current.isEmpty()) { - result.add(current); - } - } - return result; - } - } - - /** Reset the meter state, clearing all registered callbacks and storages. */ - public void resetForTest() { - synchronized (collectLock) { - synchronized (callbackLock) { - callbackRegistrations.clear(); - } - this.readerStorageRegistries.values().forEach(MetricStorageRegistry::resetForTest); - } - } - - /** Registers new synchronous storage associated with a given instrument. */ - public final WriteableMetricStorage registerSynchronousMetricStorage( - InstrumentDescriptor instrument, MeterProviderSharedState meterProviderSharedState) { - - List registeredStorages = new ArrayList<>(); - for (Map.Entry entry : - readerStorageRegistries.entrySet()) { - RegisteredReader reader = entry.getKey(); - MetricStorageRegistry registry = entry.getValue(); - for (RegisteredView registeredView : - reader.getViewRegistry().findViews(instrument, getInstrumentationScopeInfo())) { - if (Aggregation.drop() == registeredView.getView().getAggregation()) { - continue; - } - registeredStorages.add( - registry.register( - SynchronousMetricStorage.create( - reader, - registeredView, - instrument, - meterProviderSharedState.getExemplarFilter()))); - } - } - - if (registeredStorages.size() == 1) { - return registeredStorages.get(0); - } - - return new MultiWritableMetricStorage(registeredStorages); - } - - /** Register new asynchronous storage associated with a given instrument. 
*/ - public final SdkObservableMeasurement registerObservableMeasurement( - InstrumentDescriptor instrumentDescriptor) { - List> registeredStorages = new ArrayList<>(); - for (Map.Entry entry : - readerStorageRegistries.entrySet()) { - RegisteredReader reader = entry.getKey(); - MetricStorageRegistry registry = entry.getValue(); - for (RegisteredView registeredView : - reader.getViewRegistry().findViews(instrumentDescriptor, getInstrumentationScopeInfo())) { - if (Aggregation.drop() == registeredView.getView().getAggregation()) { - continue; - } - registeredStorages.add( - registry.register( - AsynchronousMetricStorage.create(reader, registeredView, instrumentDescriptor))); - } - } - - return SdkObservableMeasurement.create( - instrumentationScopeInfo, instrumentDescriptor, registeredStorages); - } -} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MetricStorageRegistry.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MetricStorageRegistry.java index a070728f06a..34bfd577a86 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MetricStorageRegistry.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MetricStorageRegistry.java @@ -88,7 +88,7 @@ public I register(I newStorage) { } /** Reset the storage registry, clearing all storages. */ - void resetForTest() { + public void resetForTest() { synchronized (lock) { registry.clear(); } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MultiWritableMetricStorage.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MultiWritableMetricStorage.java deleted file mode 100644 index 59632a9cc4a..00000000000 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MultiWritableMetricStorage.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics.internal.state; - -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.context.Context; -import java.util.List; - -class MultiWritableMetricStorage implements WriteableMetricStorage { - private final List storages; - - MultiWritableMetricStorage(List storages) { - this.storages = storages; - } - - @Override - public void recordLong(long value, Attributes attributes, Context context) { - for (WriteableMetricStorage storage : storages) { - storage.recordLong(value, attributes, context); - } - } - - @Override - public void recordDouble(double value, Attributes attributes, Context context) { - for (WriteableMetricStorage storage : storages) { - storage.recordDouble(value, attributes, context); - } - } -} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MutableMeasurement.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MutableMeasurement.java new file mode 100644 index 00000000000..7bac8202447 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/MutableMeasurement.java @@ -0,0 +1,125 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import io.opentelemetry.api.common.Attributes; + +/** + * A mutable {@link Measurement} implementation + * + *

    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *

    This class is not thread-safe. + */ +public final class MutableMeasurement implements Measurement { + + static void setDoubleMeasurement( + MutableMeasurement mutableMeasurement, + long startEpochNanos, + long epochNanos, + double value, + Attributes attributes) { + mutableMeasurement.set( + startEpochNanos, + epochNanos, + /* hasLongValue= */ false, + 0L, + /* hasDoubleValue= */ true, + value, + attributes); + } + + static void setLongMeasurement( + MutableMeasurement mutableMeasurement, + long startEpochNanos, + long epochNanos, + long value, + Attributes attributes) { + mutableMeasurement.set( + startEpochNanos, + epochNanos, + /* hasLongValue= */ true, + value, + /* hasDoubleValue= */ false, + 0.0, + attributes); + } + + private long startEpochNanos; + private long epochNanos; + private boolean hasLongValue; + private long longValue; + private boolean hasDoubleValue; + private double doubleValue; + + private Attributes attributes = Attributes.empty(); + + /** Sets the values. */ + private void set( + long startEpochNanos, + long epochNanos, + boolean hasLongValue, + long longValue, + boolean hasDoubleValue, + double doubleValue, + Attributes attributes) { + this.startEpochNanos = startEpochNanos; + this.epochNanos = epochNanos; + this.hasLongValue = hasLongValue; + this.longValue = longValue; + this.hasDoubleValue = hasDoubleValue; + this.doubleValue = doubleValue; + this.attributes = attributes; + } + + @Override + public Measurement withStartEpochNanos(long startEpochNanos) { + this.startEpochNanos = startEpochNanos; + return this; + } + + @Override + public Measurement withAttributes(Attributes attributes) { + this.attributes = attributes; + return this; + } + + @Override + public long startEpochNanos() { + return startEpochNanos; + } + + @Override + public long epochNanos() { + return epochNanos; + } + + @Override + public boolean hasLongValue() { + return hasLongValue; + } + + @Override + public long longValue() { + return longValue; + } + + @Override + public boolean hasDoubleValue() { + return hasDoubleValue; + } + + @Override + public double doubleValue() { + return doubleValue; + } + + @Override + public Attributes attributes() { + return attributes; + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ObjectPool.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ObjectPool.java new file mode 100644 index 00000000000..8be6abcb5dd --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/ObjectPool.java @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import java.util.function.Supplier; + +/** + * A pool of objects of type {@code T}. + * + *

    When an object is borrowed from an empty pool, an object will be created by the supplied + * {@code objectCreator} and returned immediately. When the pool is not empty, an object is removed + * from the pool and returned. The user is expected to return the object to the pool when it is no + * longer used. + * + *
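A minimal usage sketch of the borrow/return cycle just described (illustrative only; the pooled type and the reset step are arbitrary choices, and the generic parameter assumed here was stripped by extraction above):

class ObjectPoolSketch {
  static void demo() {
    ObjectPool<StringBuilder> pool = new ObjectPool<>(StringBuilder::new);
    StringBuilder builder = pool.borrowObject(); // pool is empty, so the supplier creates one
    builder.append("reusable");
    builder.setLength(0);        // the caller is responsible for resetting state before reuse
    pool.returnObject(builder);  // hand the instance back for the next borrower
    StringBuilder recycled = pool.borrowObject(); // same instance, no new allocation
  }
}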

    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *

    This class is not thread-safe. + */ +public final class ObjectPool<T> { + private final ArrayBasedStack<T> pool; + private final Supplier<T> objectCreator; + + /** + * Constructs an object pool. + * + * @param objectCreator Supplier used to create an object when the pool is empty + */ + public ObjectPool(Supplier<T> objectCreator) { + this.pool = new ArrayBasedStack<>(); + this.objectCreator = objectCreator; + } + + /** + * Gets an object from the pool. + * + * @return An object from the pool, or a new object if the pool is empty + */ + public T borrowObject() { + T object = pool.pop(); + if (object == null) { + object = objectCreator.get(); + } + return object; + } + + public void returnObject(T object) { + pool.push(object); + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/PooledHashMap.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/PooledHashMap.java new file mode 100644 index 00000000000..2a9bfd8cc6b --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/PooledHashMap.java @@ -0,0 +1,267 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import static java.util.Objects.requireNonNull; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.BiConsumer; +import javax.annotation.Nullable; + +/** + * A bucket-based hash map with an internal pool of reusable map entry objects. + * + *

    The goal of this map is to minimize memory allocation, leading to reduced time spent in + * garbage collection. + * + *

    This map avoids allocating a new map entry on each put operation by maintaining a pool of + * reusable (mutable) map entries and borrowing a map entry object from the pool to hold the given + * key-value of the put operation. The borrowed object is returned to the pool when the map entry + * key is removed from the map. + * + *
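A short usage sketch of the map described above (illustrative only). Iteration goes through forEach because, as shown further down, the entrySet()/keySet()/values() views are unsupported in this implementation:

class PooledHashMapSketch {
  static void demo() {
    PooledHashMap<String, Integer> map = new PooledHashMap<>();
    map.put("requests", 1);                    // borrows an Entry from the internal pool
    Integer previous = map.put("requests", 2); // reuses the existing Entry; returns the old value 1
    Integer current = map.get("requests");     // 2
    map.forEach((key, value) -> System.out.println(key + "=" + value));
    map.remove("requests");                    // the Entry goes back to the pool for later puts
  }
}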

    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + * + *

    This class is not thread-safe. + * + * @param <K> The map key type + * @param <V> The map value type + */ +@SuppressWarnings("ForLoopReplaceableByForEach") +public final class PooledHashMap<K, V> implements Map<K, V> { + private static final int DEFAULT_CAPACITY = 16; + private static final float LOAD_FACTOR = 0.75f; + + private ArrayList<Entry<K, V>>[] table; + private final ObjectPool<Entry<K, V>> entryPool; + private int size; + + /** + * Creates a {@link PooledHashMap} with {@code capacity} buckets. + * + *

    The hashmap contains an array of buckets, each is an array-list of items. The number of + * buckets expands over time to avoid having too many items in one bucket, otherwise accessing an + * item by key won't be a constant time complexity. + * + * @param capacity The initial number of buckets to start with + */ + @SuppressWarnings({"unchecked"}) + public PooledHashMap(int capacity) { + this.table = (ArrayList>[]) new ArrayList[capacity]; + this.entryPool = new ObjectPool<>(Entry::new); + this.size = 0; + } + + /** + * Creates a new {@link PooledHashMap} with a default amount of buckets (capacity). + * + * @see PooledHashMap#PooledHashMap(int) + */ + public PooledHashMap() { + this(DEFAULT_CAPACITY); + } + + /** + * Add a key, value pair to the map. + * + *
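The bucket-expansion rule mentioned above reduces, in the code that follows, to a simple load-factor check; a worked sketch using the constants from this diff:

    // size > LOAD_FACTOR * table.length triggers rehash(), which doubles the bucket count.
    int buckets = 16;            // DEFAULT_CAPACITY
    float loadFactor = 0.75f;    // LOAD_FACTOR
    int size = 13;
    boolean needsRehash = size > loadFactor * buckets;      // 13 > 12 -> true, table grows to 32
    int bucket = Math.abs("someKey".hashCode() % buckets);  // bucket selection used by put/get/remove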

    Internally it uses a MapEntry from a pool of entries, to store this mapping + * + * @param key key with which the specified value is to be associated + * @param value value to be associated with the specified key + * @return Null if the was no previous mapping for this key, or the value of the previous mapping + * of this key + */ + @Override + @Nullable + public V put(K key, V value) { + requireNonNull(key, "This map does not support null keys"); + requireNonNull(value, "This map does not support null values"); + if (size > LOAD_FACTOR * table.length) { + rehash(); + } + + int bucket = getBucket(key); + ArrayList> entries = table[bucket]; + if (entries == null) { + entries = new ArrayList<>(); + table[bucket] = entries; + } else { + // Don't optimize to enhanced for-loop since implicit iterator used allocated memory in O(n) + for (int i = 0; i < entries.size(); i++) { + Entry entry = entries.get(i); + if (Objects.equals(entry.key, key)) { + V oldValue = entry.value; + entry.value = value; + return oldValue; + } + } + } + Entry entry = entryPool.borrowObject(); + entry.key = key; + entry.value = value; + entries.add(entry); + size++; + return null; + } + + @SuppressWarnings({"unchecked"}) + private void rehash() { + ArrayList>[] oldTable = table; + table = (ArrayList>[]) new ArrayList[2 * oldTable.length]; + + // put() to new table below will reset size back to correct number + size = 0; + + for (int i = 0; i < oldTable.length; i++) { + ArrayList> bucket = oldTable[i]; + if (bucket != null) { + for (Entry entry : bucket) { + put(requireNonNull(entry.key), requireNonNull(entry.value)); + entryPool.returnObject(entry); + } + bucket.clear(); + } + } + } + + /** + * Retrieves the mapped value for {@code key}. + * + * @param key the key whose associated value is to be returned + * @return The mapped value for {@code key} or null if there is no such mapping + */ + @Override + @Nullable + @SuppressWarnings("unchecked") + public V get(Object key) { + requireNonNull(key, "This map does not support null keys"); + + int bucket = getBucket((K) key); + ArrayList> entries = table[bucket]; + if (entries != null) { + for (int i = 0; i < entries.size(); i++) { + Entry entry = entries.get(i); + if (Objects.equals(entry.key, key)) { + return entry.value; + } + } + } + return null; + } + + /** + * Removes the mapping for the given {@code key}. 
+ * + * @param key key whose mapping is to be removed from the map + * @return The value mapped to this key, if the mapping exists, or null otherwise + */ + @Override + @Nullable + @SuppressWarnings("unchecked") + public V remove(Object key) { + requireNonNull(key, "This map does not support null keys"); + + int bucket = getBucket((K) key); + ArrayList> entries = table[bucket]; + if (entries != null) { + for (int i = 0; i < entries.size(); i++) { + Entry entry = entries.get(i); + if (Objects.equals(entry.key, key)) { + V oldValue = entry.value; + entries.remove(i); + entryPool.returnObject(entry); + size--; + return oldValue; + } + } + } + return null; + } + + @Override + public int size() { + return size; + } + + @Override + public boolean isEmpty() { + return size == 0; + } + + @Override + public boolean containsKey(Object key) { + requireNonNull(key, "This map does not support null keys"); + + return get(key) != null; + } + + @Override + public boolean containsValue(Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public void clear() { + for (int i = 0; i < table.length; i++) { + ArrayList> bucket = table[i]; + if (bucket != null) { + for (int j = 0; j < bucket.size(); j++) { + Entry entry = bucket.get(j); + entryPool.returnObject(entry); + } + bucket.clear(); + } + } + size = 0; + } + + @Override + public void forEach(BiConsumer action) { + for (int j = 0; j < table.length; j++) { + ArrayList> bucket = table[j]; + if (bucket != null) { + for (int i = 0; i < bucket.size(); i++) { + Entry entry = bucket.get(i); + action.accept(entry.key, entry.value); + } + } + } + } + + private int getBucket(K key) { + return Math.abs(key.hashCode() % table.length); + } + + @Override + public Set> entrySet() { + throw new UnsupportedOperationException(); + } + + @Override + public Collection values() { + throw new UnsupportedOperationException(); + } + + @Override + public void putAll(Map m) { + throw new UnsupportedOperationException(); + } + + @Override + public Set keySet() { + throw new UnsupportedOperationException(); + } + + private static class Entry { + @Nullable K key; + + @Nullable V value; + } +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/SdkObservableMeasurement.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/SdkObservableMeasurement.java index df2d2b6512c..27eeac81190 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/SdkObservableMeasurement.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/SdkObservableMeasurement.java @@ -5,17 +5,19 @@ package io.opentelemetry.sdk.metrics.internal.state; -import static io.opentelemetry.sdk.metrics.internal.state.Measurement.doubleMeasurement; -import static io.opentelemetry.sdk.metrics.internal.state.Measurement.longMeasurement; +import static io.opentelemetry.sdk.metrics.internal.state.ImmutableMeasurement.createDouble; +import static io.opentelemetry.sdk.metrics.internal.state.ImmutableMeasurement.createLong; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; import io.opentelemetry.api.metrics.ObservableLongMeasurement; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.internal.ThrottlingLogger; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import 
io.opentelemetry.sdk.metrics.internal.export.RegisteredReader; import java.util.List; +import java.util.Objects; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nullable; @@ -36,6 +38,9 @@ public final class SdkObservableMeasurement private final InstrumentDescriptor instrumentDescriptor; private final List> storages; + /** Only used when {@code activeReader}'s memoryMode is {@link MemoryMode#REUSABLE_DATA}. */ + private final MutableMeasurement mutableMeasurement = new MutableMeasurement(); + // These fields are set before invoking callbacks. They allow measurements to be recorded to the // storages for correct reader, and with the correct time. @Nullable private volatile RegisteredReader activeReader; @@ -104,7 +109,23 @@ public void record(long value) { @Override public void record(long value, Attributes attributes) { - doRecord(longMeasurement(startEpochNanos, epochNanos, value, attributes)); + if (activeReader == null) { + logNoActiveReader(); + return; + } + + Measurement measurement; + + MemoryMode memoryMode = activeReader.getReader().getMemoryMode(); + if (Objects.requireNonNull(memoryMode) == MemoryMode.IMMUTABLE_DATA) { + measurement = createLong(startEpochNanos, epochNanos, value, attributes); + } else { + MutableMeasurement.setLongMeasurement( + mutableMeasurement, startEpochNanos, epochNanos, value, attributes); + measurement = mutableMeasurement; + } + + doRecord(measurement); } @Override @@ -114,23 +135,48 @@ public void record(double value) { @Override public void record(double value, Attributes attributes) { - doRecord(doubleMeasurement(startEpochNanos, epochNanos, value, attributes)); - } - - private void doRecord(Measurement measurement) { - RegisteredReader activeReader = this.activeReader; if (activeReader == null) { - throttlingLogger.log( + logNoActiveReader(); + return; + } + if (Double.isNaN(value)) { + logger.log( Level.FINE, - "Measurement recorded for instrument " + "Instrument " + instrumentDescriptor.getName() - + " outside callback registered to instrument. Dropping measurement."); + + " has recorded measurement Not-a-Number (NaN) value with attributes " + + attributes + + ". Dropping measurement."); return; } + + Measurement measurement; + MemoryMode memoryMode = activeReader.getReader().getMemoryMode(); + if (Objects.requireNonNull(memoryMode) == MemoryMode.IMMUTABLE_DATA) { + measurement = createDouble(startEpochNanos, epochNanos, value, attributes); + } else { + MutableMeasurement.setDoubleMeasurement( + mutableMeasurement, startEpochNanos, epochNanos, value, attributes); + measurement = mutableMeasurement; + } + + doRecord(measurement); + } + + private void doRecord(Measurement measurement) { + RegisteredReader activeReader = this.activeReader; for (AsynchronousMetricStorage storage : storages) { if (storage.getRegisteredReader().equals(activeReader)) { storage.record(measurement); } } } + + private void logNoActiveReader() { + throttlingLogger.log( + Level.FINE, + "Measurement recorded for instrument " + + instrumentDescriptor.getName() + + " outside callback registered to instrument. 
Dropping measurement."); + } } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorage.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorage.java index 3b821367343..f743c26cbc7 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorage.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorage.java @@ -46,7 +46,8 @@ static SynchronousMetricStorage cr MetricDescriptor.create(view, registeredView.getViewSourceInfo(), instrumentDescriptor); Aggregator aggregator = ((AggregatorFactory) view.getAggregation()) - .createAggregator(instrumentDescriptor, exemplarFilter); + .createAggregator( + instrumentDescriptor, exemplarFilter, registeredReader.getReader().getMemoryMode()); // We won't be storing this metric. if (Aggregator.drop() == aggregator) { return empty(); diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/WriteableMetricStorage.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/WriteableMetricStorage.java index 77e2dd510b8..7191a63f1e0 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/WriteableMetricStorage.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/state/WriteableMetricStorage.java @@ -22,4 +22,10 @@ public interface WriteableMetricStorage { /** Records a measurement. */ void recordDouble(double value, Attributes attributes, Context context); + + /** + * Returns {@code true} if the storage is actively recording measurements, and {@code false} + * otherwise (i.e. noop / empty metric storage is installed). + */ + boolean isEnabled(); } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/AdviceAttributesProcessor.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/AdviceAttributesProcessor.java index 161dcd72832..8b77b48e631 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/AdviceAttributesProcessor.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/AdviceAttributesProcessor.java @@ -7,7 +7,6 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.context.Context; import java.util.HashSet; import java.util.List; @@ -23,9 +22,7 @@ final class AdviceAttributesProcessor extends AttributesProcessor { @Override public Attributes process(Attributes incoming, Context context) { - AttributesBuilder builder = incoming.toBuilder(); - builder.removeIf(key -> !attributeKeys.contains(key)); - return builder.build(); + return FilteredAttributes.create(incoming, attributeKeys); } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/Base2ExponentialHistogramAggregation.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/Base2ExponentialHistogramAggregation.java index a0408bc32fd..a4facefca7a 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/Base2ExponentialHistogramAggregation.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/Base2ExponentialHistogramAggregation.java @@ -8,6 +8,7 @@ import static io.opentelemetry.api.internal.Utils.checkArgument; import io.opentelemetry.sdk.common.Clock; +import 
io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.internal.RandomSupplier; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.data.ExemplarData; @@ -58,7 +59,7 @@ public static Aggregation getDefault() { * @return the aggregation */ public static Aggregation create(int maxBuckets, int maxScale) { - checkArgument(maxBuckets >= 1, "maxBuckets must be > 0"); + checkArgument(maxBuckets >= 2, "maxBuckets must be >= 2"); checkArgument(maxScale <= 20 && maxScale >= -10, "maxScale must be -10 <= x <= 20"); return new Base2ExponentialHistogramAggregation(maxBuckets, maxScale); } @@ -66,18 +67,22 @@ public static Aggregation create(int maxBuckets, int maxScale) { @Override @SuppressWarnings("unchecked") public Aggregator createAggregator( - InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { + InstrumentDescriptor instrumentDescriptor, + ExemplarFilter exemplarFilter, + MemoryMode memoryMode) { return (Aggregator) new DoubleBase2ExponentialHistogramAggregator( () -> ExemplarReservoir.filtered( exemplarFilter, - ExemplarReservoir.doubleFixedSizeReservoir( - Clock.getDefault(), - Runtime.getRuntime().availableProcessors(), - RandomSupplier.platformDefault())), + ExemplarReservoir.longToDouble( + ExemplarReservoir.doubleFixedSizeReservoir( + Clock.getDefault(), + Runtime.getRuntime().availableProcessors(), + RandomSupplier.platformDefault()))), maxBuckets, - maxScale); + maxScale, + memoryMode); } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/DefaultAggregation.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/DefaultAggregation.java index 798c9ea11b4..494e486c333 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/DefaultAggregation.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/DefaultAggregation.java @@ -5,6 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.view; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.internal.ThrottlingLogger; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.data.ExemplarData; @@ -49,6 +50,7 @@ private static Aggregation resolve(InstrumentDescriptor instrument, boolean with } return ExplicitBucketHistogramAggregation.getDefault(); case OBSERVABLE_GAUGE: + case GAUGE: return LastValueAggregation.getInstance(); } logger.log(Level.WARNING, "Unable to find default aggregation for instrument: " + instrument); @@ -57,9 +59,11 @@ private static Aggregation resolve(InstrumentDescriptor instrument, boolean with @Override public Aggregator createAggregator( - InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { + InstrumentDescriptor instrumentDescriptor, + ExemplarFilter exemplarFilter, + MemoryMode memoryMode) { return ((AggregatorFactory) resolve(instrumentDescriptor, /* withAdvice= */ true)) - .createAggregator(instrumentDescriptor, exemplarFilter); + .createAggregator(instrumentDescriptor, exemplarFilter, memoryMode); } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/DropAggregation.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/DropAggregation.java index df02ce003db..2b27938c20a 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/DropAggregation.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/DropAggregation.java @@ -5,6 +5,7 @@ 
package io.opentelemetry.sdk.metrics.internal.view; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.PointData; @@ -32,7 +33,9 @@ private DropAggregation() {} @Override @SuppressWarnings("unchecked") public Aggregator createAggregator( - InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { + InstrumentDescriptor instrumentDescriptor, + ExemplarFilter exemplarFilter, + MemoryMode memoryMode) { return (Aggregator) Aggregator.drop(); } diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ExplicitBucketHistogramAggregation.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ExplicitBucketHistogramAggregation.java index 1ec3867dc7e..e452d02792d 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ExplicitBucketHistogramAggregation.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ExplicitBucketHistogramAggregation.java @@ -6,6 +6,7 @@ package io.opentelemetry.sdk.metrics.internal.view; import io.opentelemetry.sdk.common.Clock; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.PointData; @@ -50,15 +51,19 @@ private ExplicitBucketHistogramAggregation(List bucketBoundaries) { @Override @SuppressWarnings("unchecked") public Aggregator createAggregator( - InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { + InstrumentDescriptor instrumentDescriptor, + ExemplarFilter exemplarFilter, + MemoryMode memoryMode) { return (Aggregator) new DoubleExplicitBucketHistogramAggregator( bucketBoundaryArray, () -> ExemplarReservoir.filtered( exemplarFilter, - ExemplarReservoir.histogramBucketReservoir( - Clock.getDefault(), bucketBoundaries))); + ExemplarReservoir.longToDouble( + ExemplarReservoir.histogramBucketReservoir( + Clock.getDefault(), bucketBoundaries))), + memoryMode); } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/FilteredAttributes.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/FilteredAttributes.java new file mode 100644 index 00000000000..d71616c20e8 --- /dev/null +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/FilteredAttributes.java @@ -0,0 +1,284 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.view; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.internal.ImmutableKeyValuePairs; +import java.util.BitSet; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.StringJoiner; +import java.util.function.BiConsumer; +import javax.annotation.Nullable; + +/** + * Filtered attributes is a filtered view of a {@link ImmutableKeyValuePairs} backed {@link + * Attributes} instance. Rather than creating an entirely new attributes instance, it keeps track of + * which source attributes are excluded while implementing the {@link Attributes} interface. + * + *
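Concretely, the factory below produces a filtered view over the source pairs rather than a copy. A sketch (FilteredAttributes is internal, so the direct call is shown for illustration only; key names are made up):

    Attributes source = Attributes.builder()
        .put("http.method", "GET")
        .put("http.route", "/users/{id}")
        .put("net.peer.name", "example.com")
        .build();

    Set<AttributeKey<?>> included = new HashSet<>(Arrays.asList(
        AttributeKey.stringKey("http.method"), AttributeKey.stringKey("http.route")));

    Attributes filtered = FilteredAttributes.create(source, included);
    // filtered.size() == 2; net.peer.name stays in the backing array but is never exposed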

    Notably, the {@link FilteredAttributes#equals(Object)} and {@link + * FilteredAttributes#hashCode()} depend on comparison against other {@link FilteredAttributes} + * instances. This means that where {@link FilteredAttributes} is used for things like map keys, it + * must be used for all keys in that map. You cannot mix {@link Attributes} implementations. This is + * also true for the default attributes implementation. + */ +@SuppressWarnings("unchecked") +abstract class FilteredAttributes implements Attributes { + + // Backing source data from ImmutableKeyValuePairs.data. This array MUST NOT be mutated. + private final Object[] sourceData; + private final int hashcode; + private final int size; + + private FilteredAttributes(Object[] sourceData, int hashcode, int size) { + this.sourceData = sourceData; + this.hashcode = hashcode; + this.size = size; + } + + /** + * Create a {@link FilteredAttributes} instance. + * + * @param source the source attributes, which SHOULD be based on the standard {@link + * ImmutableKeyValuePairs}. If not, the source will first be converted to the standard + * implementation. + * @param includedKeys the set of attribute keys to include in the output. + */ + @SuppressWarnings("NullAway") + static Attributes create(Attributes source, Set> includedKeys) { + // Convert alternative implementations of Attributes to standard implementation. + // This is required for proper functioning of equals and hashcode. + if (!(source instanceof ImmutableKeyValuePairs)) { + source = convertToStandardImplementation(source); + } + if (!(source instanceof ImmutableKeyValuePairs)) { + throw new IllegalStateException( + "Expected ImmutableKeyValuePairs based implementation of Attributes. This is a programming error."); + } + // Compute filteredIndices (and filteredIndicesBitSet if needed) during initialization. Compute + // hashcode at the same time to avoid iteration later. + Object[] sourceData = ((ImmutableKeyValuePairs) source).getData(); + int filteredIndices = 0; + BitSet filteredIndicesBitSet = + source.size() > SmallFilteredAttributes.BITS_PER_INTEGER ? new BitSet(source.size()) : null; + int hashcode = 1; + int size = 0; + for (int i = 0; i < sourceData.length; i += 2) { + int filterIndex = i / 2; + // If the sourceData key isn't present in includedKeys, record the exclusion in + // filteredIndices or filteredIndicesBitSet (depending on size) + if (!includedKeys.contains(sourceData[i])) { + // Record + if (filteredIndicesBitSet != null) { + filteredIndicesBitSet.set(filterIndex); + } else { + filteredIndices = filteredIndices | (1 << filterIndex); + } + } else { // The key-value is included in the output, record in the hashcode and size. + hashcode = 31 * hashcode + sourceData[i].hashCode(); + hashcode = 31 * hashcode + sourceData[i + 1].hashCode(); + size++; + } + } + // If size is 0, short circuit and return Attributes.empty() + if (size == 0) { + return Attributes.empty(); + } + return filteredIndicesBitSet != null + ? new RegularFilteredAttributes(sourceData, hashcode, size, filteredIndicesBitSet) + : new SmallFilteredAttributes(sourceData, hashcode, size, filteredIndices); + } + + /** + * Implementation that relies on the source having less than {@link #BITS_PER_INTEGER} attributes, + * and storing entry filter status in the bits of an integer. 
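The bit bookkeeping used by this small variant amounts to the following (illustrative; filterIndex is the pair index i / 2 computed in create above):

    int filteredIndices = 0;
    int filterIndex = 3;                                             // fourth key/value pair of the source data
    filteredIndices |= (1 << filterIndex);                           // mark the pair as excluded
    boolean included = (filteredIndices & (1 << filterIndex)) == 0;  // false: it is filtered out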
+ */ + private static class SmallFilteredAttributes extends FilteredAttributes { + + private static final int BITS_PER_INTEGER = 32; + + private final int filteredIndices; + + private SmallFilteredAttributes( + Object[] sourceData, int hashcode, int size, int filteredIndices) { + super(sourceData, hashcode, size); + this.filteredIndices = filteredIndices; + } + + @Override + boolean includeIndexInOutput(int sourceIndex) { + return (filteredIndices & (1 << (sourceIndex / 2))) == 0; + } + } + + /** + * Implementation that can handle attributes of arbitrary size by storing filter status in a + * {@link BitSet}. + */ + private static class RegularFilteredAttributes extends FilteredAttributes { + + private final BitSet bitSet; + + private RegularFilteredAttributes(Object[] sourceData, int hashcode, int size, BitSet bitSet) { + super(sourceData, hashcode, size); + this.bitSet = bitSet; + } + + @Override + boolean includeIndexInOutput(int sourceIndex) { + return !bitSet.get(sourceIndex / 2); + } + } + + private static Attributes convertToStandardImplementation(Attributes source) { + AttributesBuilder builder = Attributes.builder(); + source.forEach( + (key, value) -> putInBuilder(builder, (AttributeKey) key, value)); + return builder.build(); + } + + @Nullable + @Override + public T get(AttributeKey key) { + if (key == null) { + return null; + } + for (int i = 0; i < sourceData.length; i += 2) { + if (key.equals(sourceData[i]) && includeIndexInOutput(i)) { + return (T) sourceData[i + 1]; + } + } + return null; + } + + @Override + public void forEach(BiConsumer, ? super Object> consumer) { + for (int i = 0; i < sourceData.length; i += 2) { + if (includeIndexInOutput(i)) { + consumer.accept((AttributeKey) sourceData[i], sourceData[i + 1]); + } + } + } + + @Override + public int size() { + return size; + } + + @Override + public boolean isEmpty() { + // #create short circuits and returns Attributes.empty() if empty, so FilteredAttributes is + // never empty + return false; + } + + @Override + public Map, Object> asMap() { + Map, Object> result = new LinkedHashMap<>(size); + for (int i = 0; i < sourceData.length; i += 2) { + if (includeIndexInOutput(i)) { + result.put((AttributeKey) sourceData[i], sourceData[i + 1]); + } + } + return Collections.unmodifiableMap(result); + } + + @Override + public AttributesBuilder toBuilder() { + AttributesBuilder builder = Attributes.builder(); + for (int i = 0; i < sourceData.length; i += 2) { + if (includeIndexInOutput(i)) { + putInBuilder(builder, (AttributeKey) sourceData[i], sourceData[i + 1]); + } + } + return builder; + } + + private static void putInBuilder(AttributesBuilder builder, AttributeKey key, T value) { + builder.put(key, value); + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + // We require other object to also be instances of FilteredAttributes. In other words, where one + // FilteredAttributes is used for a key in a map, it must be used for all the keys. Note, this + // same requirement exists for the default Attributes implementation - you can not mix + // implementations. + if (object == null || !(object instanceof FilteredAttributes)) { + return false; + } + + FilteredAttributes that = (FilteredAttributes) object; + // exit early if sizes are not equal + if (size() != that.size()) { + return false; + } + // Compare each non-filtered key / value pair from this to that. + // Depends on the entries from the backing ImmutableKeyValuePairs being sorted. 
+ int thisIndex = 0; + int thatIndex = 0; + boolean thisDone; + boolean thatDone; + do { + thisDone = thisIndex >= this.sourceData.length; + thatDone = thatIndex >= that.sourceData.length; + // advance to next unfiltered key value pair for this and that + if (!thisDone && !this.includeIndexInOutput(thisIndex)) { + thisIndex += 2; + continue; + } + if (!thatDone && !that.includeIndexInOutput(thatIndex)) { + thatIndex += 2; + continue; + } + // if we're done iterating both this and that, we exit and return true since these are equal + if (thisDone && thatDone) { + break; + } + // if either this or that is done iterating, but not both, these are not equal + if (thisDone != thatDone) { + return false; + } + // if we make it here, both thisIndex and thatIndex within bounds and are included in the + // output. the current + // key and value and this and that must be equal for this and that to be equal. + if (!Objects.equals(this.sourceData[thisIndex], that.sourceData[thatIndex]) + || !Objects.equals(this.sourceData[thisIndex + 1], that.sourceData[thatIndex + 1])) { + return false; + } + thisIndex += 2; + thatIndex += 2; + } while (true); + // if we make it here without exiting early, all elements of this and that are equal + return true; + } + + @Override + public int hashCode() { + return hashcode; + } + + @Override + public String toString() { + StringJoiner joiner = new StringJoiner(",", "FilteredAttributes{", "}"); + for (int i = 0; i < sourceData.length; i += 2) { + if (includeIndexInOutput(i)) { + joiner.add(((AttributeKey) sourceData[i]).getKey() + "=" + sourceData[i + 1]); + } + } + return joiner.toString(); + } + + abstract boolean includeIndexInOutput(int sourceIndex); +} diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/LastValueAggregation.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/LastValueAggregation.java index 5f6039532e0..693af692c93 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/LastValueAggregation.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/LastValueAggregation.java @@ -5,9 +5,14 @@ package io.opentelemetry.sdk.metrics.internal.view; +import io.opentelemetry.sdk.common.Clock; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.internal.RandomSupplier; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData; +import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; @@ -16,6 +21,7 @@ import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; +import java.util.function.Supplier; /** * Last-value aggregation configuration. @@ -36,21 +42,45 @@ private LastValueAggregation() {} @Override @SuppressWarnings("unchecked") public Aggregator createAggregator( - InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { + InstrumentDescriptor instrumentDescriptor, + ExemplarFilter exemplarFilter, + MemoryMode memoryMode) { // For the initial version we do not sample exemplars on gauges. 
switch (instrumentDescriptor.getValueType()) { case LONG: - return (Aggregator) new LongLastValueAggregator(ExemplarReservoir::longNoSamples); + { + Supplier> reservoirFactory = + () -> + ExemplarReservoir.filtered( + exemplarFilter, + ExemplarReservoir.longFixedSizeReservoir( + Clock.getDefault(), + Runtime.getRuntime().availableProcessors(), + RandomSupplier.platformDefault())); + return (Aggregator) new LongLastValueAggregator(reservoirFactory, memoryMode); + } case DOUBLE: - return (Aggregator) new DoubleLastValueAggregator(ExemplarReservoir::doubleNoSamples); + { + Supplier> reservoirFactory = + () -> + ExemplarReservoir.filtered( + exemplarFilter, + ExemplarReservoir.doubleFixedSizeReservoir( + Clock.getDefault(), + Runtime.getRuntime().availableProcessors(), + RandomSupplier.platformDefault())); + return (Aggregator) new DoubleLastValueAggregator(reservoirFactory, memoryMode); + } } throw new IllegalArgumentException("Invalid instrument value type"); } @Override public boolean isCompatibleWithInstrument(InstrumentDescriptor instrumentDescriptor) { - return instrumentDescriptor.getType() == InstrumentType.OBSERVABLE_GAUGE; + InstrumentType instrumentType = instrumentDescriptor.getType(); + return instrumentType == InstrumentType.OBSERVABLE_GAUGE + || instrumentType == InstrumentType.GAUGE; } @Override diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/SumAggregation.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/SumAggregation.java index b689d9f58f1..7ca1294666b 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/SumAggregation.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/SumAggregation.java @@ -6,6 +6,7 @@ package io.opentelemetry.sdk.metrics.internal.view; import io.opentelemetry.sdk.common.Clock; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.internal.RandomSupplier; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; @@ -39,7 +40,9 @@ private SumAggregation() {} @Override @SuppressWarnings("unchecked") public Aggregator createAggregator( - InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { + InstrumentDescriptor instrumentDescriptor, + ExemplarFilter exemplarFilter, + MemoryMode memoryMode) { switch (instrumentDescriptor.getValueType()) { case LONG: { @@ -51,7 +54,8 @@ public Aggregator createAggr Clock.getDefault(), Runtime.getRuntime().availableProcessors(), RandomSupplier.platformDefault())); - return (Aggregator) new LongSumAggregator(instrumentDescriptor, reservoirFactory); + return (Aggregator) + new LongSumAggregator(instrumentDescriptor, reservoirFactory, memoryMode); } case DOUBLE: { @@ -63,7 +67,8 @@ public Aggregator createAggr Clock.getDefault(), Runtime.getRuntime().availableProcessors(), RandomSupplier.platformDefault())); - return (Aggregator) new DoubleSumAggregator(instrumentDescriptor, reservoirFactory); + return (Aggregator) + new DoubleSumAggregator(instrumentDescriptor, reservoirFactory, memoryMode); } } throw new IllegalArgumentException("Invalid instrument value type"); diff --git a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ViewRegistry.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ViewRegistry.java index 9e5bba37233..0904a38ff0f 100644 --- a/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ViewRegistry.java +++ 
b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/ViewRegistry.java @@ -9,28 +9,27 @@ import static java.util.Objects.requireNonNull; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.GlobUtil; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentSelector; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder; import io.opentelemetry.sdk.metrics.View; +import io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector; import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregationUtil; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.debug.SourceInfo; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.export.CardinalityLimitSelector; import io.opentelemetry.sdk.metrics.internal.state.MetricStorage; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; -import java.util.regex.Pattern; import javax.annotation.concurrent.Immutable; /** @@ -170,7 +169,8 @@ private static boolean matchesSelector( return false; } if (selector.getInstrumentName() != null - && !toGlobPatternPredicate(selector.getInstrumentName()).test(descriptor.getName())) { + && !GlobUtil.toGlobPatternPredicate(selector.getInstrumentName()) + .test(descriptor.getName())) { return false; } return matchesMeter(selector, meterScope); @@ -190,69 +190,6 @@ private static boolean matchesMeter( || selector.getMeterSchemaUrl().equals(meterScope.getSchemaUrl()); } - /** - * Return a predicate that returns {@code true} if a string matches the {@code globPattern}. - * - *

- * {@code globPattern} may contain the wildcard characters {@code *} and {@code ?} with the
- * following matching criteria:
- *
- * <ul>
- *   <li>{@code *} matches 0 or more instances of any character
- *   <li>{@code ?} matches exactly one instance of any character
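The helper now lives in GlobUtil (see the import added above), presumably with the same contract as the method removed here. A usage sketch of the matching rules just listed:

    Predicate<String> exact = GlobUtil.toGlobPatternPredicate("http.client.duration");
    exact.test("HTTP.CLIENT.DURATION");    // true - exact patterns match ignoring case (per the removed implementation)

    Predicate<String> wildcard = GlobUtil.toGlobPatternPredicate("http.client.*");
    wildcard.test("http.client.duration"); // true  - '*' matches zero or more characters
    wildcard.test("http.server.duration"); // false

    Predicate<String> single = GlobUtil.toGlobPatternPredicate("counter?");
    single.test("counter2");               // true  - '?' matches exactly one character
    single.test("counter22");              // false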
    - */ - // Visible for testing - static Predicate toGlobPatternPredicate(String globPattern) { - // Match all - if (globPattern.equals("*")) { - return unused -> true; - } - - // If globPattern contains '*' or '?', convert it to a regex and return corresponding predicate - for (int i = 0; i < globPattern.length(); i++) { - char c = globPattern.charAt(i); - if (c == '*' || c == '?') { - Pattern pattern = toRegexPattern(globPattern); - return string -> pattern.matcher(string).matches(); - } - } - - // Exact match, ignoring case - return globPattern::equalsIgnoreCase; - } - - /** - * Transform the {@code globPattern} to a regex by converting {@code *} to {@code .*}, {@code ?} - * to {@code .}, and escaping other regex special characters. - */ - private static Pattern toRegexPattern(String globPattern) { - int tokenStart = -1; - StringBuilder patternBuilder = new StringBuilder(); - for (int i = 0; i < globPattern.length(); i++) { - char c = globPattern.charAt(i); - if (c == '*' || c == '?') { - if (tokenStart != -1) { - patternBuilder.append(Pattern.quote(globPattern.substring(tokenStart, i))); - tokenStart = -1; - } - if (c == '*') { - patternBuilder.append(".*"); - } else { - // c == '?' - patternBuilder.append("."); - } - } else { - if (tokenStart == -1) { - tokenStart = i; - } - } - } - if (tokenStart != -1) { - patternBuilder.append(Pattern.quote(globPattern.substring(tokenStart))); - } - return Pattern.compile(patternBuilder.toString()); - } - private static RegisteredView applyAdviceToDefaultView( RegisteredView instrumentDefaultView, Advice advice) { return RegisteredView.create( diff --git a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/package-info.java b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/package-info.java similarity index 65% rename from exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/package-info.java rename to sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/package-info.java index 23e486e2fd8..2cd6165107b 100644 --- a/exporters/jaeger/src/main/java/io/opentelemetry/exporter/jaeger/package-info.java +++ b/sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/internal/view/package-info.java @@ -3,7 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ +/** View related internal classes. 
*/ @ParametersAreNonnullByDefault -package io.opentelemetry.exporter.jaeger; +package io.opentelemetry.sdk.metrics.internal.view; import javax.annotation.ParametersAreNonnullByDefault; diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AbstractInstrumentBuilderTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AbstractInstrumentBuilderTest.java deleted file mode 100644 index 15f6ba3030f..00000000000 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AbstractInstrumentBuilderTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.metrics; - -import static org.assertj.core.api.Assertions.assertThat; - -import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; -import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.time.TestClock; -import java.util.Collections; -import org.junit.jupiter.api.Test; - -class AbstractInstrumentBuilderTest { - - @Test - void stringRepresentation() { - InstrumentationScopeInfo scope = InstrumentationScopeInfo.create("scope-name"); - TestInstrumentBuilder builder = - new TestInstrumentBuilder( - MeterProviderSharedState.create( - TestClock.create(), Resource.getDefault(), ExemplarFilter.alwaysOff(), 0), - MeterSharedState.create(scope, Collections.emptyList()), - InstrumentType.COUNTER, - InstrumentValueType.LONG, - "instrument-name", - "instrument-description", - "instrument-unit"); - assertThat(builder.toString()) - .isEqualTo( - "TestInstrumentBuilder{" - + "descriptor=" - + "InstrumentDescriptor{" - + "name=instrument-name, " - + "description=instrument-description, " - + "unit=instrument-unit, " - + "type=COUNTER, " - + "valueType=LONG, " - + "advice=Advice{explicitBucketBoundaries=null, attributes=null}" - + "}}"); - } - - private static class TestInstrumentBuilder - extends AbstractInstrumentBuilder { - - TestInstrumentBuilder( - MeterProviderSharedState meterProviderSharedState, - MeterSharedState meterSharedState, - InstrumentType type, - InstrumentValueType valueType, - String name, - String description, - String unit) { - super(meterProviderSharedState, meterSharedState, type, valueType, name, description, unit); - } - - @Override - protected TestInstrumentBuilder getThis() { - return this; - } - } -} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AggregationTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AggregationTest.java index 544eded9b84..be2502ec054 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AggregationTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AggregationTest.java @@ -31,9 +31,9 @@ void haveToString() { assertThat(Aggregation.base2ExponentialBucketHistogram()) .asString() .isEqualTo("Base2ExponentialHistogramAggregation{maxBuckets=160,maxScale=20}"); - assertThat(Aggregation.base2ExponentialBucketHistogram(1, 0)) + assertThat(Aggregation.base2ExponentialBucketHistogram(2, 0)) .asString() - .isEqualTo("Base2ExponentialHistogramAggregation{maxBuckets=1,maxScale=0}"); + .isEqualTo("Base2ExponentialHistogramAggregation{maxBuckets=2,maxScale=0}"); } @Test @@ -51,11 +51,13 @@ void aggregationIsCompatible() { InstrumentDescriptor observableUpDownCounter = 
descriptorForType(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER); InstrumentDescriptor observableGauge = descriptorForType(InstrumentType.OBSERVABLE_GAUGE); + InstrumentDescriptor gauge = descriptorForType(InstrumentType.GAUGE); InstrumentDescriptor histogram = descriptorForType(InstrumentType.HISTOGRAM); AggregatorFactory defaultAggregation = ((AggregatorFactory) Aggregation.defaultAggregation()); assertThat(defaultAggregation.isCompatibleWithInstrument(counter)).isTrue(); assertThat(defaultAggregation.isCompatibleWithInstrument(observableCounter)).isTrue(); + assertThat(defaultAggregation.isCompatibleWithInstrument(gauge)).isTrue(); assertThat(defaultAggregation.isCompatibleWithInstrument(upDownCounter)).isTrue(); assertThat(defaultAggregation.isCompatibleWithInstrument(observableUpDownCounter)).isTrue(); assertThat(defaultAggregation.isCompatibleWithInstrument(observableGauge)).isTrue(); @@ -64,6 +66,7 @@ void aggregationIsCompatible() { AggregatorFactory drop = ((AggregatorFactory) Aggregation.drop()); assertThat(drop.isCompatibleWithInstrument(counter)).isTrue(); assertThat(drop.isCompatibleWithInstrument(observableCounter)).isTrue(); + assertThat(drop.isCompatibleWithInstrument(gauge)).isTrue(); assertThat(drop.isCompatibleWithInstrument(upDownCounter)).isTrue(); assertThat(drop.isCompatibleWithInstrument(observableUpDownCounter)).isTrue(); assertThat(drop.isCompatibleWithInstrument(observableGauge)).isTrue(); @@ -75,6 +78,7 @@ void aggregationIsCompatible() { assertThat(sum.isCompatibleWithInstrument(upDownCounter)).isTrue(); assertThat(sum.isCompatibleWithInstrument(observableUpDownCounter)).isTrue(); assertThat(sum.isCompatibleWithInstrument(observableGauge)).isFalse(); + assertThat(sum.isCompatibleWithInstrument(gauge)).isFalse(); assertThat(sum.isCompatibleWithInstrument(histogram)).isTrue(); AggregatorFactory explicitHistogram = @@ -84,6 +88,7 @@ void aggregationIsCompatible() { assertThat(explicitHistogram.isCompatibleWithInstrument(upDownCounter)).isFalse(); assertThat(explicitHistogram.isCompatibleWithInstrument(observableUpDownCounter)).isFalse(); assertThat(explicitHistogram.isCompatibleWithInstrument(observableGauge)).isFalse(); + assertThat(explicitHistogram.isCompatibleWithInstrument(gauge)).isFalse(); assertThat(explicitHistogram.isCompatibleWithInstrument(histogram)).isTrue(); AggregatorFactory exponentialHistogram = @@ -93,6 +98,7 @@ void aggregationIsCompatible() { assertThat(exponentialHistogram.isCompatibleWithInstrument(upDownCounter)).isFalse(); assertThat(exponentialHistogram.isCompatibleWithInstrument(observableUpDownCounter)).isFalse(); assertThat(exponentialHistogram.isCompatibleWithInstrument(observableGauge)).isFalse(); + assertThat(exponentialHistogram.isCompatibleWithInstrument(gauge)).isFalse(); assertThat(exponentialHistogram.isCompatibleWithInstrument(histogram)).isTrue(); AggregatorFactory lastValue = ((AggregatorFactory) Aggregation.lastValue()); @@ -101,6 +107,7 @@ void aggregationIsCompatible() { assertThat(lastValue.isCompatibleWithInstrument(upDownCounter)).isFalse(); assertThat(lastValue.isCompatibleWithInstrument(observableUpDownCounter)).isFalse(); assertThat(lastValue.isCompatibleWithInstrument(observableGauge)).isTrue(); + assertThat(lastValue.isCompatibleWithInstrument(gauge)).isTrue(); assertThat(lastValue.isCompatibleWithInstrument(histogram)).isFalse(); } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/CardinalityTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/CardinalityTest.java index 
3a5c0edc733..17989d14a1a 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/CardinalityTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/CardinalityTest.java @@ -20,9 +20,8 @@ import io.opentelemetry.sdk.metrics.data.LongPointData; import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.data.SumData; +import io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector; import io.opentelemetry.sdk.metrics.export.MetricReader; -import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; -import io.opentelemetry.sdk.metrics.internal.export.CardinalityLimitSelector; import io.opentelemetry.sdk.metrics.internal.state.DefaultSynchronousMetricStorage; import io.opentelemetry.sdk.metrics.internal.state.MetricStorage; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; @@ -315,28 +314,24 @@ void readerAndViewCardinalityConfiguration() { // other instrument kinds CardinalityLimitSelector cardinalityLimitSelector = instrumentType -> instrumentType == InstrumentType.COUNTER ? counterLimit : generalLimit; - SdkMeterProviderBuilder builder = SdkMeterProvider.builder(); - - // Register both the delta and cumulative reader with the customized cardinality selector - SdkMeterProviderUtil.registerMetricReaderWithCardinalitySelector( - builder, deltaReader, cardinalityLimitSelector); - SdkMeterProviderUtil.registerMetricReaderWithCardinalitySelector( - builder, cumulativeReader, cardinalityLimitSelector); - - // Register a view which defines a custom cardinality limit for instrumented named "counter2" - ViewBuilder viewBuilder1 = View.builder(); - SdkMeterProviderUtil.setCardinalityLimit(viewBuilder1, counter2Limit); - builder.registerView( - InstrumentSelector.builder().setName("counter2").build(), viewBuilder1.build()); - - // Register a view which defines a custom cardinality limit for instrumented named - // "asyncCounter" - ViewBuilder viewBuilder2 = View.builder(); - SdkMeterProviderUtil.setCardinalityLimit(viewBuilder2, asyncCounterLimit); - builder.registerView( - InstrumentSelector.builder().setName("asyncCounter").build(), viewBuilder2.build()); + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + // Register both the delta and cumulative reader with the customized cardinality + // selector + .registerMetricReader(deltaReader, cardinalityLimitSelector) + .registerMetricReader(cumulativeReader, cardinalityLimitSelector) + // Register a view which defines a custom cardinality limit for instrumented named + // "counter2" + .registerView( + InstrumentSelector.builder().setName("counter2").build(), + View.builder().setCardinalityLimit(counter2Limit).build()) + // Register a view which defines a custom cardinality limit for instrumented named + // "asyncCounter" + .registerView( + InstrumentSelector.builder().setName("asyncCounter").build(), + View.builder().setCardinalityLimit(asyncCounterLimit).build()) + .build(); - SdkMeterProvider sdkMeterProvider = builder.build(); meter = sdkMeterProvider.get(CardinalityTest.class.getName()); LongCounter counter1 = meter.counterBuilder("counter1").build(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/ExplicitBucketBoundariesAdviceTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/ExplicitBucketBoundariesAdviceTest.java index e25eb098e9e..16d4d05cac0 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/ExplicitBucketBoundariesAdviceTest.java +++ 
b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/ExplicitBucketBoundariesAdviceTest.java @@ -8,10 +8,10 @@ import static io.opentelemetry.sdk.metrics.Aggregation.explicitBucketHistogram; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import io.github.netmikey.logunit.api.LogCapturer; import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongHistogram; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleHistogramBuilder; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongHistogramBuilder; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; @@ -21,6 +21,7 @@ import java.util.function.Function; import java.util.stream.Stream; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -29,6 +30,12 @@ class ExplicitBucketBoundariesAdviceTest { private SdkMeterProvider meterProvider = SdkMeterProvider.builder().build(); + @RegisterExtension + LogCapturer logCapturer = + LogCapturer.create() + .captureForLogger(SdkLongHistogram.class.getName()) + .captureForLogger(SdkDoubleHistogram.class.getName()); + @AfterEach void cleanup() { meterProvider.close(); @@ -61,6 +68,8 @@ void histogramWithoutAdvice(Function> histogram .hasBucketBoundaries( 0d, 5d, 10d, 25d, 50d, 75d, 100d, 250d, 500d, 750d, 1_000d, 2_500d, 5_000d, 7_500d, 10_000d)))); + + assertThat(logCapturer.getEvents()).isEmpty(); } @ParameterizedTest @@ -88,6 +97,8 @@ void histogramWithAdvice(Function> histogramBui point .hasBucketCounts(1, 1, 1, 1) .hasBucketBoundaries(10.0, 20.0, 30.0)))); + + assertThat(logCapturer.getEvents()).isEmpty(); } @ParameterizedTest @@ -120,6 +131,8 @@ void histogramWithAdviceAndViews(Function> hist histogram -> histogram.hasPointsSatisfying( point -> point.hasBucketCounts(4, 0).hasBucketBoundaries(50.0)))); + + assertThat(logCapturer.getEvents()).isEmpty(); } @ParameterizedTest @@ -151,6 +164,42 @@ void histogramWithAdviceAndReaderAggregationPreference( histogram -> histogram.hasPointsSatisfying( point -> point.hasBucketCounts(4, 0).hasBucketBoundaries(50.0)))); + + assertThat(logCapturer.getEvents()).isEmpty(); + } + + @ParameterizedTest + @MethodSource("histogramsWithInvalidAdvice") + @SuppressLogger(SdkDoubleHistogram.class) + @SuppressLogger(SdkLongHistogram.class) + void histogramWithInvalidAdvice( + Function> histogramBuilder, String expectedErrorMessage) { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + meterProvider = SdkMeterProvider.builder().registerMetricReader(reader).build(); + + Consumer histogramRecorder = histogramBuilder.apply(meterProvider); + histogramRecorder.accept(5L); + histogramRecorder.accept(15L); + histogramRecorder.accept(25L); + histogramRecorder.accept(35L); + + // Should use default bucket bounds + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> + assertThat(metric) + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasBucketCounts( + 0, 1, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasBucketBoundaries( + 0d, 5d, 10d, 25d, 50d, 75d, 100d, 250d, 500d, 750d, + 
1_000d, 2_500d, 5_000d, 7_500d, 10_000d)))); + + logCapturer.assertContains(expectedErrorMessage); } private static Stream histogramsWithoutAdvice() { @@ -158,16 +207,16 @@ private static Stream histogramsWithoutAdvice() { Arguments.of( (Function>) meterProvider -> { - DoubleHistogram build = + DoubleHistogram histogram = meterProvider.get("meter").histogramBuilder("histogram").build(); - return build::record; + return histogram::record; }), Arguments.of( (Function>) meterProvider -> { - LongHistogram build = + LongHistogram histogram = meterProvider.get("meter").histogramBuilder("histogram").ofLongs().build(); - return build::record; + return histogram::record; })); } @@ -176,27 +225,66 @@ private static Stream histogramsWithAdvice() { Arguments.of( (Function>) meterProvider -> { - DoubleHistogram build = - ((ExtendedDoubleHistogramBuilder) - meterProvider.get("meter").histogramBuilder("histogram")) - .setAdvice( - advice -> - advice.setExplicitBucketBoundaries( - Arrays.asList(10.0, 20.0, 30.0))) + DoubleHistogram histogram = + meterProvider + .get("meter") + .histogramBuilder("histogram") + .setExplicitBucketBoundariesAdvice(Arrays.asList(10.0, 20.0, 30.0)) .build(); - return build::record; + return histogram::record; }), Arguments.of( (Function>) meterProvider -> { - LongHistogram build = - ((ExtendedLongHistogramBuilder) - meterProvider.get("meter").histogramBuilder("histogram").ofLongs()) - .setAdvice( - advice -> - advice.setExplicitBucketBoundaries(Arrays.asList(10L, 20L, 30L))) + LongHistogram histogram = + meterProvider + .get("meter") + .histogramBuilder("histogram") + .ofLongs() + .setExplicitBucketBoundariesAdvice(Arrays.asList(10L, 20L, 30L)) .build(); - return build::record; + return histogram::record; })); } + + private static Stream histogramsWithInvalidAdvice() { + return Stream.of( + Arguments.of( + (Function>) + meterProvider -> { + DoubleHistogram histogram = + meterProvider + .get("meter") + .histogramBuilder("histogram") + .setExplicitBucketBoundariesAdvice(Arrays.asList(10.0, 9.0, 8.0)) + .build(); + return histogram::record; + }, + "Error setting explicit bucket boundaries advice: Bucket boundaries must be in increasing order: 10.0 >= 9.0"), + Arguments.of( + (Function>) + meterProvider -> { + LongHistogram histogram = + meterProvider + .get("meter") + .histogramBuilder("histogram") + .ofLongs() + .setExplicitBucketBoundariesAdvice(Arrays.asList(10L, 9L, 8L)) + .build(); + return histogram::record; + }, + "Error setting explicit bucket boundaries advice: Bucket boundaries must be in increasing order: 10.0 >= 9.0"), + Arguments.of( + (Function>) + meterProvider -> { + DoubleHistogram histogram = + meterProvider + .get("meter") + .histogramBuilder("histogram") + .setExplicitBucketBoundariesAdvice(null) + .build(); + return histogram::record; + }, + "Error setting explicit bucket boundaries advice: bucketBoundaries must not be null")); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/IdentityTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/IdentityTest.java index 9982e2e439b..195c8730e45 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/IdentityTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/IdentityTest.java @@ -8,7 +8,6 @@ import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.github.netmikey.logunit.api.LogCapturer; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleHistogramBuilder; import 
io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.internal.state.MetricStorageRegistry; @@ -222,14 +221,18 @@ void sameMeterSameInstrumentNameDifferentNonIdentifyingFieldsNoViews() { // Register histogram1, with and without advice. First registration without advice wins. meterProvider.get("meter1").histogramBuilder("histogram1").build().record(8); - ((ExtendedDoubleHistogramBuilder) meterProvider.get("meter1").histogramBuilder("histogram1")) - .setAdvice(advice -> advice.setExplicitBucketBoundaries(Arrays.asList(10.0, 20.0, 30.0))) + meterProvider + .get("meter1") + .histogramBuilder("histogram1") + .setExplicitBucketBoundariesAdvice(Arrays.asList(10.0, 20.0, 30.0)) .build() .record(8); // Register histogram2, with and without advice. First registration with advice wins. - ((ExtendedDoubleHistogramBuilder) meterProvider.get("meter1").histogramBuilder("histogram2")) - .setAdvice(advice -> advice.setExplicitBucketBoundaries(Arrays.asList(10.0, 20.0, 30.0))) + meterProvider + .get("meter1") + .histogramBuilder("histogram2") + .setExplicitBucketBoundariesAdvice(Arrays.asList(10.0, 20.0, 30.0)) .build() .record(8); meterProvider.get("meter1").histogramBuilder("histogram2").build().record(8); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/InstrumentBuilderTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/InstrumentBuilderTest.java new file mode 100644 index 00000000000..de1a351e619 --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/InstrumentBuilderTest.java @@ -0,0 +1,74 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.internal.MeterConfig; +import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; +import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; +import io.opentelemetry.sdk.metrics.internal.state.MeterProviderSharedState; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.time.TestClock; +import java.util.Collections; +import org.junit.jupiter.api.Test; + +class InstrumentBuilderTest { + + public static final MeterProviderSharedState PROVIDER_SHARED_STATE = + MeterProviderSharedState.create( + TestClock.create(), Resource.getDefault(), ExemplarFilter.alwaysOff(), 0); + static final InstrumentationScopeInfo SCOPE = InstrumentationScopeInfo.create("scope-name"); + public static final SdkMeter SDK_METER = + new SdkMeter( + PROVIDER_SHARED_STATE, SCOPE, Collections.emptyList(), MeterConfig.defaultConfig()); + + @Test + void stringRepresentation() { + InstrumentBuilder builder = + new InstrumentBuilder( + "instrument-name", InstrumentType.COUNTER, InstrumentValueType.LONG, SDK_METER) + .setDescription("instrument-description") + .setUnit("instrument-unit") + .setAdviceBuilder(Advice.builder()); + assertThat(builder.toString()) + .isEqualTo( + "InstrumentBuilder{" + + "descriptor=" + + "InstrumentDescriptor{" + + "name=instrument-name, " + + "description=instrument-description, " + + "unit=instrument-unit, " + + "type=COUNTER, " + + "valueType=LONG, " + + "advice=Advice{explicitBucketBoundaries=null, attributes=null}" + + "}}"); + } + + @Test + void toStringHelper() { + InstrumentBuilder builder = + new InstrumentBuilder( + 
"instrument-name", InstrumentType.HISTOGRAM, InstrumentValueType.DOUBLE, SDK_METER) + .setDescription("instrument-description") + .setUnit("instrument-unit") + .setAdviceBuilder(Advice.builder()); + + assertThat(builder.toStringHelper("FooBuilder")) + .isEqualTo( + "FooBuilder{" + + "descriptor=" + + "InstrumentDescriptor{" + + "name=instrument-name, " + + "description=instrument-description, " + + "unit=instrument-unit, " + + "type=HISTOGRAM, " + + "valueType=DOUBLE, " + + "advice=Advice{explicitBucketBoundaries=null, attributes=null}" + + "}}"); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleCounterTest.java index 215eaa2fe31..d733821972b 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleCounterTest.java @@ -16,6 +16,7 @@ import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.internal.state.DefaultSynchronousMetricStorage; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; @@ -166,6 +167,14 @@ void doubleCounterAdd_Monotonicity() { "Counters can only increase. Instrument testCounter has recorded a negative value."); } + @Test + @SuppressLogger(DefaultSynchronousMetricStorage.class) + void doubleCounterAdd_NaN() { + DoubleCounter doubleCounter = sdkMeter.counterBuilder("testCounter").ofDoubles().build(); + doubleCounter.add(Double.NaN); + assertThat(sdkMeterReader.collectAllMetrics()).hasSize(0); + } + @Test void stressTest() { DoubleCounter doubleCounter = sdkMeter.counterBuilder("testCounter").ofDoubles().build(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleGaugeTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleGaugeTest.java index 404ed738d83..61c0d3ad0b9 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleGaugeTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleGaugeTest.java @@ -11,17 +11,23 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleGauge; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.ObservableDoubleGauge; -import io.opentelemetry.extension.incubator.metrics.DoubleGauge; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleGaugeBuilder; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.internal.state.DefaultSynchronousMetricStorage; +import io.opentelemetry.sdk.metrics.internal.state.SdkObservableMeasurement; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; +import io.opentelemetry.sdk.trace.SdkTracerProvider; import java.time.Duration; +import java.util.Collections; import java.util.stream.IntStream; -import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; 
/** Unit tests for {@link SdkDoubleGauge}. */ @@ -45,21 +51,25 @@ class SdkDoubleGaugeTest { @Test void set_PreventNullAttributes() { - assertThatThrownBy( - () -> - ((ExtendedDoubleGaugeBuilder) sdkMeter.gaugeBuilder("testGauge")) - .build() - .set(1.0, null)) + assertThatThrownBy(() -> sdkMeter.gaugeBuilder("testGauge").build().set(1.0, null)) .isInstanceOf(NullPointerException.class) .hasMessage("attributes"); } + @Test + @SuppressLogger(DefaultSynchronousMetricStorage.class) + void set_NaN() { + DoubleGauge gauge = sdkMeter.gaugeBuilder("testGauge").build(); + gauge.set(Double.NaN); + assertThat(cumulativeReader.collectAllMetrics()).hasSize(0); + } + @Test void observable_RemoveCallback() { ObservableDoubleGauge gauge = sdkMeter.gaugeBuilder("testGauge").buildWithCallback(measurement -> measurement.record(10)); - Assertions.assertThat(cumulativeReader.collectAllMetrics()) + assertThat(cumulativeReader.collectAllMetrics()) .satisfiesExactly( metric -> assertThat(metric) @@ -69,21 +79,28 @@ void observable_RemoveCallback() { gauge.close(); - Assertions.assertThat(cumulativeReader.collectAllMetrics()).hasSize(0); + assertThat(cumulativeReader.collectAllMetrics()).hasSize(0); + } + + @Test + @SuppressLogger(SdkObservableMeasurement.class) + void observable_NaN() { + sdkMeter + .gaugeBuilder("testGauge") + .buildWithCallback(measurement -> measurement.record(Double.NaN)); + assertThat(cumulativeReader.collectAllMetrics()).hasSize(0); } @Test void collectMetrics_NoRecords() { - ((ExtendedDoubleGaugeBuilder) sdkMeter.gaugeBuilder("testGauge")).build(); + sdkMeter.gaugeBuilder("testGauge").build(); assertThat(cumulativeReader.collectAllMetrics()).isEmpty(); } @Test void collectMetrics_WithEmptyAttributes() { DoubleGauge doubleGauge = - ((ExtendedDoubleGaugeBuilder) - sdkMeter.gaugeBuilder("testGauge").setDescription("description").setUnit("K")) - .build(); + sdkMeter.gaugeBuilder("testGauge").setDescription("description").setUnit("K").build(); testClock.advance(Duration.ofNanos(SECOND_NANOS)); doubleGauge.set(12d, Attributes.empty()); doubleGauge.set(13d); @@ -107,11 +124,59 @@ void collectMetrics_WithEmptyAttributes() { .hasValue(13d)))); } + @Test + void collectMetrics_WithExemplars() { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + .setClock(testClock) + .setResource(RESOURCE) + .registerView( + InstrumentSelector.builder().setName("*").build(), + View.builder().setAttributeFilter(Collections.emptySet()).build()) + .registerMetricReader(reader) + .build(); + Meter sdkMeter = sdkMeterProvider.get(getClass().getName()); + DoubleGauge doubleGauge = + sdkMeter.gaugeBuilder("testGauge").setDescription("description").setUnit("K").build(); + + SdkTracerProvider tracerProvider = SdkTracerProvider.builder().build(); + Tracer tracer = tracerProvider.get("foo"); + + Span span = tracer.spanBuilder("span").startSpan(); + try (Scope unused = span.makeCurrent()) { + doubleGauge.set(12d, Attributes.builder().put("key", "value").build()); + } + + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> + assertThat(metric) + .hasResource(RESOURCE) + .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) + .hasName("testGauge") + .hasDescription("description") + .hasUnit("K") + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(12d) + .hasExemplarsSatisfying( + exemplar -> + exemplar + .hasValue(12d) + .hasFilteredAttributes( + Attributes.builder() + .put("key", 
"value") + .build()))))); + } + @Test void collectMetrics_WithMultipleCollects() { long startTime = testClock.now(); - DoubleGauge doubleGauge = - ((ExtendedDoubleGaugeBuilder) sdkMeter.gaugeBuilder("testGauge")).build(); + DoubleGauge doubleGauge = sdkMeter.gaugeBuilder("testGauge").build(); doubleGauge.set(12.1d, Attributes.empty()); doubleGauge.set(123.3d, Attributes.builder().put("K", "V").build()); doubleGauge.set(21.4d, Attributes.empty()); @@ -209,8 +274,7 @@ void collectMetrics_WithMultipleCollects() { @Test void stressTest() { - DoubleGauge doubleGauge = - ((ExtendedDoubleGaugeBuilder) sdkMeter.gaugeBuilder("testGauge")).build(); + DoubleGauge doubleGauge = sdkMeter.gaugeBuilder("testGauge").build(); StressTestRunner.Builder stressTestBuilder = StressTestRunner.builder().setCollectionIntervalMs(100); @@ -249,8 +313,7 @@ void stressTest() { void stressTest_WithDifferentLabelSet() { String[] keys = {"Key_1", "Key_2", "Key_3", "Key_4"}; String[] values = {"Value_1", "Value_2", "Value_3", "Value_4"}; - DoubleGauge doubleGauge = - ((ExtendedDoubleGaugeBuilder) sdkMeter.gaugeBuilder("testGauge")).build(); + DoubleGauge doubleGauge = sdkMeter.gaugeBuilder("testGauge").build(); StressTestRunner.Builder stressTestBuilder = StressTestRunner.builder().setCollectionIntervalMs(100); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogramTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogramTest.java index 6862a1d8fc9..dd210faad46 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogramTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogramTest.java @@ -14,11 +14,16 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.internal.state.DefaultSynchronousMetricStorage; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; +import io.opentelemetry.sdk.trace.SdkTracerProvider; import java.time.Duration; import java.util.Arrays; import java.util.Collections; @@ -266,6 +271,105 @@ void doubleHistogramRecord_NonNegativeCheck() { "Histograms can only record non-negative values. 
Instrument testHistogram has recorded a negative value."); } + @Test + @SuppressLogger(DefaultSynchronousMetricStorage.class) + void doubleHistogramRecord_NaN() { + DoubleHistogram histogram = sdkMeter.histogramBuilder("testHistogram").build(); + histogram.record(Double.NaN); + assertThat(sdkMeterReader.collectAllMetrics()).hasSize(0); + } + + @Test + void collectMetrics_ExemplarsWithExponentialHistogram() { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + .setClock(testClock) + .setResource(RESOURCE) + .registerView( + InstrumentSelector.builder().setType(InstrumentType.HISTOGRAM).build(), + View.builder() + .setAggregation(Aggregation.base2ExponentialBucketHistogram()) + .setAttributeFilter(Collections.emptySet()) + .build()) + .registerMetricReader(reader) + .build(); + Meter sdkMeter = sdkMeterProvider.get(getClass().getName()); + DoubleHistogram histogram = sdkMeter.histogramBuilder("testHistogram").build(); + + SdkTracerProvider tracerProvider = SdkTracerProvider.builder().build(); + Tracer tracer = tracerProvider.get("foo"); + + Span span = tracer.spanBuilder("span").startSpan(); + try (Scope unused = span.makeCurrent()) { + histogram.record(10, Attributes.builder().put("key", "value").build()); + } + + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> + assertThat(metric) + .hasExponentialHistogramSatisfying( + exponentialHistogram -> + exponentialHistogram.hasPointsSatisfying( + point -> + point + .hasSum(10.0) + .hasAttributes(Attributes.empty()) + .hasExemplarsSatisfying( + exemplar -> + exemplar + .hasValue(10.0) + .hasFilteredAttributes( + Attributes.builder() + .put("key", "value") + .build()))))); + } + + @Test + void collectMetrics_ExemplarsWithExplicitBucketHistogram() { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + .setClock(testClock) + .setResource(RESOURCE) + .registerView( + InstrumentSelector.builder().setName("*").build(), + View.builder().setAttributeFilter(Collections.emptySet()).build()) + .registerMetricReader(reader) + .build(); + Meter sdkMeter = sdkMeterProvider.get(getClass().getName()); + DoubleHistogram histogram = sdkMeter.histogramBuilder("testHistogram").build(); + + SdkTracerProvider tracerProvider = SdkTracerProvider.builder().build(); + Tracer tracer = tracerProvider.get("foo"); + + Span span = tracer.spanBuilder("span").startSpan(); + try (Scope unused = span.makeCurrent()) { + histogram.record(10, Attributes.builder().put("key", "value").build()); + } + + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> + assertThat(metric) + .hasHistogramSatisfying( + explicitHistogram -> + explicitHistogram.hasPointsSatisfying( + point -> + point + .hasSum(10) + .hasAttributes(Attributes.empty()) + .hasExemplarsSatisfying( + exemplar -> + exemplar + .hasValue(10.0) + .hasFilteredAttributes( + Attributes.builder() + .put("key", "value") + .build()))))); + } + @Test void stressTest() { DoubleHistogram doubleHistogram = sdkMeter.histogramBuilder("testHistogram").build(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounterTest.java index 6b30b2b13a5..a8dc0041c0d 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounterTest.java +++ 
b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounterTest.java @@ -254,4 +254,18 @@ void stressTest_WithDifferentLabelSet() { .hasValue(20_000) .hasAttributes(attributeEntry(keys[3], values[3]))))); } + + @Test + void testToString() { + String expected = + "SdkDoubleUpDownCounter{descriptor=InstrumentDescriptor{name=testUpDownCounter, description=description, unit=ms, type=UP_DOWN_COUNTER, valueType=DOUBLE, advice=Advice{explicitBucketBoundaries=null, attributes=null}}}"; + DoubleUpDownCounter counter = + sdkMeter + .upDownCounterBuilder("testUpDownCounter") + .ofDoubles() + .setDescription("description") + .setUnit("ms") + .build(); + assertThat(counter).hasToString(expected); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongGaugeTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongGaugeTest.java index dbe04000bef..0a117e48d82 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongGaugeTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongGaugeTest.java @@ -11,15 +11,19 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongGauge; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.ObservableLongGauge; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongGaugeBuilder; -import io.opentelemetry.extension.incubator.metrics.LongGauge; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; +import io.opentelemetry.sdk.trace.SdkTracerProvider; import java.time.Duration; +import java.util.Collections; import java.util.stream.IntStream; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; @@ -45,11 +49,7 @@ class SdkLongGaugeTest { @Test void set_PreventNullAttributes() { - assertThatThrownBy( - () -> - ((ExtendedLongGaugeBuilder) sdkMeter.gaugeBuilder("testGauge").ofLongs()) - .build() - .set(1, null)) + assertThatThrownBy(() -> sdkMeter.gaugeBuilder("testGauge").ofLongs().build().set(1, null)) .isInstanceOf(NullPointerException.class) .hasMessage("attributes"); } @@ -77,19 +77,18 @@ void observable_RemoveCallback() { @Test void collectMetrics_NoRecords() { - ((ExtendedLongGaugeBuilder) sdkMeter.gaugeBuilder("testGauge").ofLongs()).build(); + sdkMeter.gaugeBuilder("testGauge").ofLongs().build(); assertThat(cumulativeReader.collectAllMetrics()).isEmpty(); } @Test void collectMetrics_WithEmptyAttributes() { LongGauge longGauge = - ((ExtendedLongGaugeBuilder) - sdkMeter - .gaugeBuilder("testGauge") - .ofLongs() - .setDescription("description") - .setUnit("K")) + sdkMeter + .gaugeBuilder("testGauge") + .ofLongs() + .setDescription("description") + .setUnit("K") .build(); testClock.advance(Duration.ofNanos(SECOND_NANOS)); longGauge.set(12, Attributes.empty()); @@ -114,11 +113,64 @@ void collectMetrics_WithEmptyAttributes() { .hasValue(13)))); } + @Test + void collectMetrics_WithExemplars() { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + .setClock(testClock) + .setResource(RESOURCE) + .registerView( + 
InstrumentSelector.builder().setName("*").build(), + View.builder().setAttributeFilter(Collections.emptySet()).build()) + .registerMetricReader(reader) + .build(); + Meter sdkMeter = sdkMeterProvider.get(getClass().getName()); + LongGauge longGauge = + sdkMeter + .gaugeBuilder("testGauge") + .setDescription("description") + .setUnit("K") + .ofLongs() + .build(); + + SdkTracerProvider tracerProvider = SdkTracerProvider.builder().build(); + Tracer tracer = tracerProvider.get("foo"); + + Span span = tracer.spanBuilder("span").startSpan(); + try (Scope unused = span.makeCurrent()) { + longGauge.set(12L, Attributes.builder().put("key", "value").build()); + } + + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> + assertThat(metric) + .hasResource(RESOURCE) + .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) + .hasName("testGauge") + .hasDescription("description") + .hasUnit("K") + .hasLongGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(12L) + .hasExemplarsSatisfying( + exemplar -> + exemplar + .hasValue(12L) + .hasFilteredAttributes( + Attributes.builder() + .put("key", "value") + .build()))))); + } + @Test void collectMetrics_WithMultipleCollects() { long startTime = testClock.now(); - LongGauge longGauge = - ((ExtendedLongGaugeBuilder) sdkMeter.gaugeBuilder("testGauge").ofLongs()).build(); + LongGauge longGauge = sdkMeter.gaugeBuilder("testGauge").ofLongs().build(); longGauge.set(12, Attributes.empty()); longGauge.set(12, Attributes.builder().put("K", "V").build()); longGauge.set(21, Attributes.empty()); @@ -216,8 +268,7 @@ void collectMetrics_WithMultipleCollects() { @Test void stressTest() { - LongGauge longGauge = - ((ExtendedLongGaugeBuilder) sdkMeter.gaugeBuilder("testGauge").ofLongs()).build(); + LongGauge longGauge = sdkMeter.gaugeBuilder("testGauge").ofLongs().build(); StressTestRunner.Builder stressTestBuilder = StressTestRunner.builder().setCollectionIntervalMs(100); @@ -256,8 +307,7 @@ void stressTest() { void stressTest_WithDifferentLabelSet() { String[] keys = {"Key_1", "Key_2", "Key_3", "Key_4"}; String[] values = {"Value_1", "Value_2", "Value_3", "Value_4"}; - LongGauge longGauge = - ((ExtendedLongGaugeBuilder) sdkMeter.gaugeBuilder("testGauge").ofLongs()).build(); + LongGauge longGauge = sdkMeter.gaugeBuilder("testGauge").ofLongs().build(); StressTestRunner.Builder stressTestBuilder = StressTestRunner.builder().setCollectionIntervalMs(100); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongHistogramTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongHistogramTest.java index 09e33faf2cc..dd802ff613f 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongHistogramTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongHistogramTest.java @@ -14,11 +14,15 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.LongHistogram; import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; +import io.opentelemetry.sdk.trace.SdkTracerProvider; import java.time.Duration; import java.util.Arrays; import 
java.util.Collections; @@ -435,6 +439,97 @@ void longHistogramRecord_NonNegativeCheck() { "Histograms can only record non-negative values. Instrument testHistogram has recorded a negative value."); } + @Test + void collectMetrics_ExemplarsWithExponentialHistogram() { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + .setClock(testClock) + .setResource(RESOURCE) + .registerView( + InstrumentSelector.builder().setType(InstrumentType.HISTOGRAM).build(), + View.builder() + .setAggregation(Aggregation.base2ExponentialBucketHistogram()) + .setAttributeFilter(Collections.emptySet()) + .build()) + .registerMetricReader(reader) + .build(); + Meter sdkMeter = sdkMeterProvider.get(getClass().getName()); + LongHistogram histogram = sdkMeter.histogramBuilder("testHistogram").ofLongs().build(); + + SdkTracerProvider tracerProvider = SdkTracerProvider.builder().build(); + Tracer tracer = tracerProvider.get("foo"); + + Span span = tracer.spanBuilder("span").startSpan(); + try (Scope unused = span.makeCurrent()) { + histogram.record(10, Attributes.builder().put("key", "value").build()); + } + + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> + assertThat(metric) + .hasExponentialHistogramSatisfying( + exponentialHistogram -> + exponentialHistogram.hasPointsSatisfying( + point -> + point + .hasSum(10.0) + .hasAttributes(Attributes.empty()) + .hasExemplarsSatisfying( + exemplar -> + exemplar + .hasValue(10.0) + .hasFilteredAttributes( + Attributes.builder() + .put("key", "value") + .build()))))); + } + + @Test + void collectMetrics_ExemplarsWithExplicitBucketHistogram() { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + .setClock(testClock) + .setResource(RESOURCE) + .registerView( + InstrumentSelector.builder().setName("*").build(), + View.builder().setAttributeFilter(Collections.emptySet()).build()) + .registerMetricReader(reader) + .build(); + Meter sdkMeter = sdkMeterProvider.get(getClass().getName()); + LongHistogram histogram = sdkMeter.histogramBuilder("testHistogram").ofLongs().build(); + + SdkTracerProvider tracerProvider = SdkTracerProvider.builder().build(); + Tracer tracer = tracerProvider.get("foo"); + + Span span = tracer.spanBuilder("span").startSpan(); + try (Scope unused = span.makeCurrent()) { + histogram.record(10, Attributes.builder().put("key", "value").build()); + } + + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> + assertThat(metric) + .hasHistogramSatisfying( + explicitHistogram -> + explicitHistogram.hasPointsSatisfying( + point -> + point + .hasSum(10) + .hasAttributes(Attributes.empty()) + .hasExemplarsSatisfying( + exemplar -> + exemplar + .hasValue(10.0) + .hasFilteredAttributes( + Attributes.builder() + .put("key", "value") + .build()))))); + } + @Test void stressTest() { LongHistogram longHistogram = sdkMeter.histogramBuilder("testHistogram").ofLongs().build(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterTest.java index 8cf501da64b..468d0121e2b 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterTest.java @@ -94,7 +94,7 @@ void builder_InvalidName() { void checkValidInstrumentName_InvalidNameLogs() { assertThat(checkValidInstrumentName("1")).isFalse(); 
sdkMeterLogs.assertContains( - "Instrument name \"1\" is invalid, returning noop instrument. Instrument names must consist of 255 or fewer characters including alphanumeric, _, ., -, and start with a letter."); + "Instrument name \"1\" is invalid, returning noop instrument. Instrument names must consist of 255 or fewer characters including alphanumeric, _, ., -, /, and start with a letter."); } @Test @@ -110,6 +110,7 @@ void checkValidInstrumentNameTest() { assertThat(checkValidInstrumentName("a1234567890")).isTrue(); assertThat(checkValidInstrumentName("a_-.")).isTrue(); assertThat(checkValidInstrumentName(new String(new char[255]).replace('\0', 'a'))).isTrue(); + assertThat(checkValidInstrumentName("a/b")).isTrue(); // Empty and null not allowed assertThat(checkValidInstrumentName(null)).isFalse(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleCounterTest.java index a568a3ffb2d..cc1c887bf3a 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleCounterTest.java @@ -8,10 +8,13 @@ import static io.opentelemetry.api.common.AttributeKey.stringKey; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; +import static org.assertj.core.api.Assertions.assertThat; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.ObservableDoubleCounter; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.internal.state.SdkObservableMeasurement; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; @@ -53,6 +56,20 @@ void removeCallback() { assertThat(sdkMeterReader.collectAllMetrics()).hasSize(0); } + @Test + @SuppressLogger(SdkObservableMeasurement.class) + void observable_NaN() { + InMemoryMetricReader sdkMeterReader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + sdkMeterProviderBuilder.registerMetricReader(sdkMeterReader).build(); + sdkMeterProvider + .get(getClass().getName()) + .counterBuilder("testObserver") + .ofDoubles() + .buildWithCallback(measurement -> measurement.record(Double.NaN)); + assertThat(sdkMeterReader.collectAllMetrics()).hasSize(0); + } + @Test void collectMetrics_NoRecords() { InMemoryMetricReader sdkMeterReader = InMemoryMetricReader.create(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleUpDownCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleUpDownCounterTest.java index daf0dc0f18e..8b212220093 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleUpDownCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleUpDownCounterTest.java @@ -8,12 +8,15 @@ import static io.opentelemetry.api.common.AttributeKey.stringKey; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; +import static org.assertj.core.api.Assertions.assertThat; import static 
org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.ObservableDoubleUpDownCounter; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.internal.state.SdkObservableMeasurement; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; @@ -55,6 +58,20 @@ void removeCallback() { assertThat(sdkMeterReader.collectAllMetrics()).hasSize(0); } + @Test + @SuppressLogger(SdkObservableMeasurement.class) + void observable_NaN() { + InMemoryMetricReader sdkMeterReader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + sdkMeterProviderBuilder.registerMetricReader(sdkMeterReader).build(); + sdkMeterProvider + .get(getClass().getName()) + .upDownCounterBuilder("testCounter") + .ofDoubles() + .buildWithCallback(measurement -> measurement.record(Double.NaN)); + assertThat(sdkMeterReader.collectAllMetrics()).hasSize(0); + } + @Test void collectMetrics_NoRecords() { InMemoryMetricReader sdkMeterReader = InMemoryMetricReader.create(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableInstrumentTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableInstrumentTest.java index a2b9e60a60f..385e6f65a8c 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableInstrumentTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableInstrumentTest.java @@ -6,8 +6,8 @@ package io.opentelemetry.sdk.metrics; import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import io.github.netmikey.logunit.api.LogCapturer; @@ -16,7 +16,6 @@ import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.state.CallbackRegistration; -import io.opentelemetry.sdk.metrics.internal.state.MeterSharedState; import io.opentelemetry.sdk.metrics.internal.state.SdkObservableMeasurement; import java.util.Collections; import org.junit.jupiter.api.BeforeEach; @@ -29,14 +28,13 @@ class SdkObservableInstrumentTest { @RegisterExtension LogCapturer logs = LogCapturer.create().captureForType(SdkObservableInstrument.class); - private MeterSharedState meterSharedState; private CallbackRegistration callbackRegistration; + private SdkMeter sdkMeter; private SdkObservableInstrument observableInstrument; @BeforeEach void setup() { - meterSharedState = - spy(MeterSharedState.create(InstrumentationScopeInfo.empty(), Collections.emptyList())); + sdkMeter = mock(SdkMeter.class); callbackRegistration = CallbackRegistration.create( Collections.singletonList( @@ -52,7 +50,7 @@ void setup() { Collections.emptyList())), () -> {}); - observableInstrument = new SdkObservableInstrument(meterSharedState, callbackRegistration); + observableInstrument = new SdkObservableInstrument(sdkMeter, callbackRegistration); } @Test @@ -60,13 +58,13 @@ void setup() { void close() { // First call to close should trigger remove from meter shared state observableInstrument.close(); - verify(meterSharedState).removeCallback(callbackRegistration); 
+ verify(sdkMeter).removeCallback(callbackRegistration); logs.assertDoesNotContain("has called close() multiple times."); // Close a second time should not trigger remove from meter shared state - Mockito.reset(meterSharedState); + Mockito.reset(sdkMeter); observableInstrument.close(); - verify(meterSharedState, never()).removeCallback(callbackRegistration); + verify(sdkMeter, never()).removeCallback(callbackRegistration); logs.assertContains("has called close() multiple times."); } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/AggregationTemporalitySelectorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/AggregationTemporalitySelectorTest.java index cb3affac280..fb821ce388e 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/AggregationTemporalitySelectorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/AggregationTemporalitySelectorTest.java @@ -28,6 +28,8 @@ void alwaysCumulative() { .isEqualTo(AggregationTemporality.CUMULATIVE); assertThat(selector.getAggregationTemporality(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER)) .isEqualTo(AggregationTemporality.CUMULATIVE); + assertThat(selector.getAggregationTemporality(InstrumentType.GAUGE)) + .isEqualTo(AggregationTemporality.CUMULATIVE); } @Test @@ -45,6 +47,8 @@ void deltaPreferred() { .isEqualTo(AggregationTemporality.CUMULATIVE); assertThat(selector.getAggregationTemporality(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER)) .isEqualTo(AggregationTemporality.CUMULATIVE); + assertThat(selector.getAggregationTemporality(InstrumentType.GAUGE)) + .isEqualTo(AggregationTemporality.DELTA); } @Test @@ -62,5 +66,37 @@ void lowMemory() { .isEqualTo(AggregationTemporality.CUMULATIVE); assertThat(selector.getAggregationTemporality(InstrumentType.OBSERVABLE_UP_DOWN_COUNTER)) .isEqualTo(AggregationTemporality.CUMULATIVE); + assertThat(selector.getAggregationTemporality(InstrumentType.GAUGE)) + .isEqualTo(AggregationTemporality.DELTA); + } + + @Test + void stringRepresentation() { + assertThat( + AggregationTemporalitySelector.asString( + AggregationTemporalitySelector.alwaysCumulative())) + .isEqualTo( + "AggregationTemporalitySelector{" + + "COUNTER=CUMULATIVE, " + + "UP_DOWN_COUNTER=CUMULATIVE, " + + "HISTOGRAM=CUMULATIVE, " + + "OBSERVABLE_COUNTER=CUMULATIVE, " + + "OBSERVABLE_UP_DOWN_COUNTER=CUMULATIVE, " + + "OBSERVABLE_GAUGE=CUMULATIVE, " + + "GAUGE=CUMULATIVE" + + "}"); + assertThat( + AggregationTemporalitySelector.asString( + AggregationTemporalitySelector.deltaPreferred())) + .isEqualTo( + "AggregationTemporalitySelector{" + + "COUNTER=DELTA, " + + "UP_DOWN_COUNTER=CUMULATIVE, " + + "HISTOGRAM=DELTA, " + + "OBSERVABLE_COUNTER=DELTA, " + + "OBSERVABLE_UP_DOWN_COUNTER=CUMULATIVE, " + + "OBSERVABLE_GAUGE=DELTA, " + + "GAUGE=DELTA" + + "}"); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/DefaultAggregationSelectorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/DefaultAggregationSelectorTest.java index 03d3a2c3689..1cbf0649c7a 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/DefaultAggregationSelectorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/DefaultAggregationSelectorTest.java @@ -45,6 +45,8 @@ void with() { .isEqualTo(Aggregation.defaultAggregation()); assertThat(selector1.getDefaultAggregation(InstrumentType.OBSERVABLE_GAUGE)) .isEqualTo(Aggregation.defaultAggregation()); + 
assertThat(selector1.getDefaultAggregation(InstrumentType.GAUGE)) + .isEqualTo(Aggregation.defaultAggregation()); DefaultAggregationSelector selector2 = selector1.with(InstrumentType.COUNTER, Aggregation.drop()); @@ -60,5 +62,36 @@ void with() { .isEqualTo(Aggregation.defaultAggregation()); assertThat(selector2.getDefaultAggregation(InstrumentType.OBSERVABLE_GAUGE)) .isEqualTo(Aggregation.defaultAggregation()); + assertThat(selector2.getDefaultAggregation(InstrumentType.GAUGE)) + .isEqualTo(Aggregation.defaultAggregation()); + } + + @Test + void stringRepresentation() { + assertThat(DefaultAggregationSelector.asString(DefaultAggregationSelector.getDefault())) + .isEqualTo( + "DefaultAggregationSelector{" + + "COUNTER=default, " + + "UP_DOWN_COUNTER=default, " + + "HISTOGRAM=default, " + + "OBSERVABLE_COUNTER=default, " + + "OBSERVABLE_UP_DOWN_COUNTER=default, " + + "OBSERVABLE_GAUGE=default, " + + "GAUGE=default" + + "}"); + assertThat( + DefaultAggregationSelector.asString( + DefaultAggregationSelector.getDefault() + .with(InstrumentType.HISTOGRAM, Aggregation.base2ExponentialBucketHistogram()))) + .isEqualTo( + "DefaultAggregationSelector{" + + "COUNTER=default, " + + "UP_DOWN_COUNTER=default, " + + "HISTOGRAM=base2_exponential_bucket_histogram, " + + "OBSERVABLE_COUNTER=default, " + + "OBSERVABLE_UP_DOWN_COUNTER=default, " + + "OBSERVABLE_GAUGE=default, " + + "GAUGE=default" + + "}"); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/PeriodicMetricReaderTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/PeriodicMetricReaderTest.java index 142e8816366..03c8d05cfaa 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/PeriodicMetricReaderTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/export/PeriodicMetricReaderTest.java @@ -26,7 +26,6 @@ import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; import io.opentelemetry.sdk.resources.Resource; import java.io.IOException; import java.time.Duration; @@ -67,17 +66,19 @@ class PeriodicMetricReaderTest { ImmutableSumData.create( /* isMonotonic= */ true, AggregationTemporality.CUMULATIVE, LONG_POINT_LIST)); - @Mock private MetricProducer metricProducer; + @Mock private CollectionRegistration collectionRegistration; @Mock private MetricExporter metricExporter; @BeforeEach void setup() { - when(metricProducer.collectAllMetrics()).thenReturn(Collections.singletonList(METRIC_DATA)); + when(collectionRegistration.collectAllMetrics()) + .thenReturn(Collections.singletonList(METRIC_DATA)); } @Test @SuppressWarnings({"rawtypes", "unchecked"}) void startOnlyOnce() { + ScheduledExecutorService scheduler = mock(ScheduledExecutorService.class); ScheduledFuture mock = mock(ScheduledFuture.class); @@ -89,7 +90,7 @@ void startOnlyOnce() { .setExecutor(scheduler) .build(); - reader.register(metricProducer); + reader.register(collectionRegistration); verify(scheduler, times(1)).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); } @@ -102,7 +103,7 @@ void periodicExport() throws Exception { .setInterval(Duration.ofMillis(100)) .build(); - reader.register(metricProducer); + reader.register(collectionRegistration); try { assertThat(waitingMetricExporter.waitForNumberOfExports(1)) .containsExactly(Collections.singletonList(METRIC_DATA)); @@ 
-122,12 +123,12 @@ void periodicExport_NoMetricsSkipsExport() { PeriodicMetricReader.builder(waitingMetricExporter) .setInterval(Duration.ofMillis(100)) .build(); - when(metricProducer.collectAllMetrics()).thenReturn(Collections.emptyList()); - reader.register(metricProducer); + when(collectionRegistration.collectAllMetrics()).thenReturn(Collections.emptyList()); + reader.register(collectionRegistration); try { assertThat(reader.forceFlush().join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); - verify(metricProducer).collectAllMetrics(); + verify(collectionRegistration).collectAllMetrics(); assertThat(waitingMetricExporter.exportTimes.size()).isEqualTo(0); } finally { reader.shutdown(); @@ -142,7 +143,7 @@ void flush() throws Exception { .setInterval(Duration.ofNanos(Long.MAX_VALUE)) .build(); - reader.register(metricProducer); + reader.register(collectionRegistration); assertThat(reader.forceFlush().join(10, TimeUnit.SECONDS).isSuccess()).isTrue(); try { @@ -164,7 +165,7 @@ public void intervalExport_exporterThrowsException() throws Exception { .setInterval(Duration.ofMillis(100)) .build(); - reader.register(metricProducer); + reader.register(collectionRegistration); try { assertThat(waitingMetricExporter.waitForNumberOfExports(2)) .containsExactly( @@ -181,7 +182,7 @@ void shutdown_ExportsOneLastTime() throws Exception { PeriodicMetricReader.builder(waitingMetricExporter) .setInterval(Duration.ofSeconds(Integer.MAX_VALUE)) .build(); - reader.register(metricProducer); + reader.register(collectionRegistration); reader.shutdown(); // This export was called during shutdown. @@ -198,7 +199,7 @@ void close_CallsShutdown() throws IOException { PeriodicMetricReader.builder(new WaitingMetricExporter()) .setInterval(Duration.ofSeconds(Integer.MAX_VALUE)) .build()); - reader.register(metricProducer); + reader.register(collectionRegistration); reader.close(); verify(reader, times(1)).shutdown(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramAggregatorTest.java index cc405c5d8b1..e774939dd4c 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramAggregatorTest.java @@ -14,16 +14,22 @@ import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricDataType; +import io.opentelemetry.sdk.metrics.internal.data.EmptyExponentialHistogramBuckets; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData; +import io.opentelemetry.sdk.metrics.internal.data.MutableExponentialHistogramBuckets; +import io.opentelemetry.sdk.metrics.internal.data.MutableExponentialHistogramPointData; import 
io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.resources.Resource; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -38,6 +44,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.junit.jupiter.params.provider.MethodSource; import org.mockito.Mock; import org.mockito.Mockito; @@ -47,10 +54,9 @@ class DoubleBase2ExponentialHistogramAggregatorTest { @Mock ExemplarReservoir reservoir; + private DoubleBase2ExponentialHistogramAggregator aggregator; private static final int MAX_SCALE = 20; - private static final DoubleBase2ExponentialHistogramAggregator aggregator = - new DoubleBase2ExponentialHistogramAggregator(ExemplarReservoir::doubleNoSamples, 160, 20); private static final Resource RESOURCE = Resource.getDefault(); private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO = InstrumentationScopeInfo.empty(); @@ -58,10 +64,16 @@ class DoubleBase2ExponentialHistogramAggregatorTest { MetricDescriptor.create("name", "description", "unit"); private static Stream provideAggregator() { - return Stream.of( - aggregator, - new DoubleBase2ExponentialHistogramAggregator( - ExemplarReservoir::doubleNoSamples, 160, MAX_SCALE)); + List parameters = new ArrayList<>(); + for (MemoryMode memoryMode : MemoryMode.values()) { + parameters.add( + new DoubleBase2ExponentialHistogramAggregator( + ExemplarReservoir::doubleNoSamples, 160, 20, memoryMode)); + parameters.add( + new DoubleBase2ExponentialHistogramAggregator( + ExemplarReservoir::doubleNoSamples, 160, MAX_SCALE, memoryMode)); + } + return parameters.stream(); } private static int valueToIndex(int scale, double value) { @@ -69,26 +81,34 @@ private static int valueToIndex(int scale, double value) { return (int) Math.ceil(Math.log(value) * scaleFactor) - 1; } - @Test - void createHandle() { + private void initialize(MemoryMode memoryMode) { + aggregator = + new DoubleBase2ExponentialHistogramAggregator( + ExemplarReservoir::doubleNoSamples, 160, 20, memoryMode); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void createHandle(MemoryMode memoryMode) { + initialize(memoryMode); + AggregatorHandle handle = aggregator.createHandle(); assertThat(handle).isInstanceOf(DoubleBase2ExponentialHistogramAggregator.Handle.class); ExponentialHistogramPointData point = ((DoubleBase2ExponentialHistogramAggregator.Handle) handle) .doAggregateThenMaybeReset( 0, 1, Attributes.empty(), Collections.emptyList(), /* reset= */ true); - assertThat(point.getPositiveBuckets()) - .isInstanceOf( - DoubleBase2ExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class); + assertThat(point.getPositiveBuckets()).isInstanceOf(EmptyExponentialHistogramBuckets.class); assertThat(point.getPositiveBuckets().getScale()).isEqualTo(MAX_SCALE); - assertThat(point.getNegativeBuckets()) - .isInstanceOf( - DoubleBase2ExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class); + assertThat(point.getNegativeBuckets()).isInstanceOf(EmptyExponentialHistogramBuckets.class); assertThat(point.getNegativeBuckets().getScale()).isEqualTo(MAX_SCALE); } - @Test - void testRecordings() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testRecordings(MemoryMode memoryMode) { + initialize(memoryMode); + AggregatorHandle 
aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordDouble(0.5); @@ -130,11 +150,14 @@ void testRecordings() { assertThat(negativeCounts.get(valueToIndex(expectedScale, 1.0) - negOffset)).isEqualTo(1); } - @Test - void testInvalidRecording() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testInvalidRecording(MemoryMode memoryMode) { + initialize(memoryMode); + AggregatorHandle aggregatorHandle = aggregator.createHandle(); - // Non finite recordings should be ignored + // Non-finite recordings should be ignored aggregatorHandle.recordDouble(Double.POSITIVE_INFINITY); aggregatorHandle.recordDouble(Double.NEGATIVE_INFINITY); aggregatorHandle.recordDouble(Double.NaN); @@ -192,10 +215,11 @@ void testRecordingsAtLimits(DoubleBase2ExponentialHistogramAggregator aggregator .isEqualTo(Double.POSITIVE_INFINITY); } - @Test - void aggregateThenMaybeReset_WithExemplars() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset_WithExemplars(MemoryMode memoryMode) { DoubleBase2ExponentialHistogramAggregator agg = - new DoubleBase2ExponentialHistogramAggregator(() -> reservoir, 160, MAX_SCALE); + new DoubleBase2ExponentialHistogramAggregator(() -> reservoir, 160, MAX_SCALE, memoryMode); Attributes attributes = Attributes.builder().put("test", "value").build(); DoubleExemplarData exemplar = @@ -223,8 +247,11 @@ void aggregateThenMaybeReset_WithExemplars() { .isEqualTo(exemplars); } - @Test - void aggregateThenMaybeReset() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset(MemoryMode memoryMode) { + initialize(memoryMode); + AggregatorHandle aggregatorHandle = aggregator.createHandle(); @@ -238,8 +265,11 @@ void aggregateThenMaybeReset() { .isEqualTo(Collections.singletonList(1L)); } - @Test - void testInsert1M() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testInsert1M(MemoryMode memoryMode) { + initialize(memoryMode); + AggregatorHandle handle = aggregator.createHandle(); @@ -261,8 +291,11 @@ void testInsert1M() { assertThat(point.getPositiveBuckets().getTotalCount()).isEqualTo(n); } - @Test - void testDownScale() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testDownScale(MemoryMode memoryMode) { + initialize(memoryMode); + DoubleBase2ExponentialHistogramAggregator.Handle handle = (DoubleBase2ExponentialHistogramAggregator.Handle) aggregator.createHandle(); // record a measurement to initialize positive buckets @@ -289,8 +322,9 @@ void testDownScale() { assertThat(buckets.getTotalCount()).isEqualTo(5); } - @Test - void testToMetricData() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testToMetricData(MemoryMode memoryMode) { Attributes attributes = Attributes.builder().put("test", "value").build(); DoubleExemplarData exemplar = ImmutableDoubleExemplarData.create( @@ -310,7 +344,8 @@ void testToMetricData() { Mockito.when(reservoirSupplier.get()).thenReturn(reservoir); DoubleBase2ExponentialHistogramAggregator cumulativeAggregator = - new DoubleBase2ExponentialHistogramAggregator(reservoirSupplier, 160, MAX_SCALE); + new DoubleBase2ExponentialHistogramAggregator( + reservoirSupplier, 160, MAX_SCALE, memoryMode); AggregatorHandle aggregatorHandle = cumulativeAggregator.createHandle(); @@ -373,8 +408,11 @@ void testToMetricData() { .isEqualTo(AggregationTemporality.DELTA); } - @Test - void testMultithreadedUpdates() throws InterruptedException { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testMultithreadedUpdates(MemoryMode memoryMode) throws 
InterruptedException { + initialize(memoryMode); + AggregatorHandle aggregatorHandle = aggregator.createHandle(); ImmutableList updates = ImmutableList.of(0D, 0.1D, -0.1D, 1D, -1D, 100D); @@ -440,4 +478,92 @@ void testMultithreadedUpdates() throws InterruptedException { valueToIndex(point.getScale(), 1) - point.getPositiveBuckets().getOffset())) .isEqualTo(numberOfUpdates); } + + @Test + public void verifyMutableDataUsedInReusableDataMemoryMode() { + initialize(MemoryMode.REUSABLE_DATA); + + DoubleBase2ExponentialHistogramAggregator.Handle handle = + (DoubleBase2ExponentialHistogramAggregator.Handle) aggregator.createHandle(); + + // record a measurement to initialize positive buckets + handle.recordDouble(0.5); + // record a measurement to initialize negative buckets + handle.recordDouble(-13.2); + + ExponentialHistogramPointData point = + Objects.requireNonNull( + handle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false)); + + assertThat(point).isInstanceOf(MutableExponentialHistogramPointData.class); + assertThat(point.getPositiveBuckets()).isInstanceOf(MutableExponentialHistogramBuckets.class); + assertThat(point.getNegativeBuckets()).isInstanceOf(MutableExponentialHistogramBuckets.class); + assertThat(point.getPositiveBuckets().getBucketCounts()).isNotEmpty(); + assertThat(point.getNegativeBuckets().getBucketCounts()).isNotEmpty(); + + handle.recordDouble(0.6); + handle.recordDouble(-16.3); + + ExponentialHistogramPointData secondAggregatePoint = + Objects.requireNonNull( + handle.aggregateThenMaybeReset(1, 2, Attributes.empty(), /* reset= */ false)); + + // Mutable point should be reused across collections. + assertThat(secondAggregatePoint).isSameAs(point); + } + + @Test + public void verifyImmutableDataUsedInImmutableDataMemoryMode() { + initialize(MemoryMode.IMMUTABLE_DATA); + + DoubleBase2ExponentialHistogramAggregator.Handle handle = + (DoubleBase2ExponentialHistogramAggregator.Handle) aggregator.createHandle(); + + // record a measurement to initialize positive buckets + handle.recordDouble(0.5); + // record a measurement to initialize negative buckets + handle.recordDouble(-13.2); + + ExponentialHistogramPointData point = + Objects.requireNonNull( + handle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false)); + + assertThat(point).isInstanceOf(ImmutableExponentialHistogramPointData.class); + assertThat(point.getPositiveBuckets()) + .isInstanceOf(DoubleBase2ExponentialHistogramBuckets.class); + assertThat(point.getNegativeBuckets()) + .isInstanceOf(DoubleBase2ExponentialHistogramBuckets.class); + } + + @Test + public void reusablePoint_emptyFirstThenRecordAndCheck() { + initialize(MemoryMode.REUSABLE_DATA); + + DoubleBase2ExponentialHistogramAggregator.Handle handle = + (DoubleBase2ExponentialHistogramAggregator.Handle) aggregator.createHandle(); + + // Let's create a point without buckets + ExponentialHistogramPointData point = + Objects.requireNonNull( + handle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false)); + + assertThat(point).isInstanceOf(MutableExponentialHistogramPointData.class); + assertThat(point.getPositiveBuckets()).isInstanceOf(EmptyExponentialHistogramBuckets.class); + assertThat(point.getNegativeBuckets()).isInstanceOf(EmptyExponentialHistogramBuckets.class); + + // record a measurement to initialize positive buckets + handle.recordDouble(0.5); + // record a measurement to initialize negative buckets + handle.recordDouble(-13.2); + + point = + Objects.requireNonNull( + handle.aggregateThenMaybeReset(0, 
1, Attributes.empty(), /* reset= */ false)); + + assertThat(point).isInstanceOf(MutableExponentialHistogramPointData.class); + assertThat(point.getPositiveBuckets()).isInstanceOf(MutableExponentialHistogramBuckets.class); + assertThat(point.getNegativeBuckets()).isInstanceOf(MutableExponentialHistogramBuckets.class); + assertThat(point.getPositiveBuckets().getBucketCounts()).isNotEmpty(); + assertThat(point.getNegativeBuckets().getBucketCounts()).isNotEmpty(); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramBucketsTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramBucketsTest.java index b839892da80..fa6bcc255f4 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramBucketsTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleBase2ExponentialHistogramBucketsTest.java @@ -8,9 +8,13 @@ import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; import java.util.Arrays; import java.util.Collections; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; /** * These are extra test cases for buckets. Much of this class is already tested via more complex @@ -18,11 +22,12 @@ */ class DoubleBase2ExponentialHistogramBucketsTest { - @Test - void record_Valid() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void record_Valid(MemoryMode memoryMode) { // Can only effectively test recording of one value here due to downscaling required. // More complex recording/downscaling operations are tested in the aggregator. 
- DoubleBase2ExponentialHistogramBuckets b = newBuckets(); + DoubleBase2ExponentialHistogramBuckets b = newBuckets(memoryMode); b.record(1); b.record(1); b.record(1); @@ -30,15 +35,17 @@ void record_Valid() { assertThat(b.getBucketCounts()).isEqualTo(Collections.singletonList(3L)); } - @Test - void record_Zero_Throws() { - DoubleBase2ExponentialHistogramBuckets b = newBuckets(); + @ParameterizedTest + @EnumSource(MemoryMode.class) + void record_Zero_Throws(MemoryMode memoryMode) { + DoubleBase2ExponentialHistogramBuckets b = newBuckets(memoryMode); assertThatThrownBy(() -> b.record(0)).isInstanceOf(IllegalStateException.class); } - @Test - void downscale_Valid() { - DoubleBase2ExponentialHistogramBuckets b = newBuckets(); + @ParameterizedTest + @EnumSource(MemoryMode.class) + void downscale_Valid(MemoryMode memoryMode) { + DoubleBase2ExponentialHistogramBuckets b = newBuckets(memoryMode); b.downscale(20); // scale of zero is easy to reason with without a calculator b.record(1); b.record(2); @@ -49,16 +56,18 @@ void downscale_Valid() { assertThat(b.getOffset()).isEqualTo(-1); } - @Test - void downscale_NegativeIncrement_Throws() { - DoubleBase2ExponentialHistogramBuckets b = newBuckets(); + @ParameterizedTest + @EnumSource(MemoryMode.class) + void downscale_NegativeIncrement_Throws(MemoryMode memoryMode) { + DoubleBase2ExponentialHistogramBuckets b = newBuckets(memoryMode); assertThatThrownBy(() -> b.downscale(-1)).isInstanceOf(IllegalStateException.class); } - @Test - void equalsAndHashCode() { - DoubleBase2ExponentialHistogramBuckets a = newBuckets(); - DoubleBase2ExponentialHistogramBuckets b = newBuckets(); + @ParameterizedTest + @EnumSource(MemoryMode.class) + void equalsAndHashCode(MemoryMode memoryMode) { + DoubleBase2ExponentialHistogramBuckets a = newBuckets(memoryMode); + DoubleBase2ExponentialHistogramBuckets b = newBuckets(memoryMode); assertThat(a).isNotNull(); assertThat(b).isEqualTo(a); @@ -76,9 +85,9 @@ void equalsAndHashCode() { assertThat(a).hasSameHashCodeAs(b); // Now we start to play with altering offset, but having same effective counts. - DoubleBase2ExponentialHistogramBuckets empty = newBuckets(); + DoubleBase2ExponentialHistogramBuckets empty = newBuckets(memoryMode); empty.downscale(20); - DoubleBase2ExponentialHistogramBuckets c = newBuckets(); + DoubleBase2ExponentialHistogramBuckets c = newBuckets(memoryMode); c.downscale(20); assertThat(c.record(1)).isTrue(); // Record can fail if scale is not set correctly. @@ -86,17 +95,85 @@ void equalsAndHashCode() { assertThat(c.getTotalCount()).isEqualTo(2); } - @Test - void toString_Valid() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void toString_Valid(MemoryMode memoryMode) { // Note this test may break once difference implementations for counts are developed since // the counts may have different toStrings(). - DoubleBase2ExponentialHistogramBuckets b = newBuckets(); + DoubleBase2ExponentialHistogramBuckets b = newBuckets(memoryMode); b.record(1); assertThat(b.toString()) .isEqualTo("DoubleExponentialHistogramBuckets{scale: 20, offset: -1, counts: {-1=1} }"); } - private static DoubleBase2ExponentialHistogramBuckets newBuckets() { - return new DoubleBase2ExponentialHistogramBuckets(20, 160); + @Test + void testGetBucketCountsWithReusableList() { + // Can only effectively test recording of one value here due to downscaling required. + // More complex recording/downscaling operations are tested in the aggregator. 
+ DoubleBase2ExponentialHistogramBuckets b = newBuckets(MemoryMode.REUSABLE_DATA); + b.record(1); + b.record(1); + b.record(1); + assertThat(b.getBucketCounts()).isEqualTo(Collections.singletonList(3L)); + + DynamicPrimitiveLongList bucketCounts = DynamicPrimitiveLongList.empty(); + b.getBucketCountsIntoReusableList(bucketCounts); + assertThat(bucketCounts).isEqualTo(Collections.singletonList(3L)); + } + + @Test + public void testGetBucketCountsWithReusableListWithEmptyCounts() { + // Can only effectively test recording of one value here due to downscaling required. + // More complex recording/downscaling operations are tested in the aggregator. + DoubleBase2ExponentialHistogramBuckets b = newBuckets(MemoryMode.REUSABLE_DATA); + assertThat(b.getBucketCounts()).isEmpty(); + + DynamicPrimitiveLongList bucketCounts = DynamicPrimitiveLongList.empty(); + b.getBucketCountsIntoReusableList(bucketCounts); + assertThat(bucketCounts).isEmpty(); + } + + @Test + public void testDownScaleReusableCountIsOkWhenUsedForSecondTime() { + DoubleBase2ExponentialHistogramBuckets immutableDataBasedBuckets = + newBuckets(MemoryMode.IMMUTABLE_DATA); + immutableDataBasedBuckets.record(0.5); + immutableDataBasedBuckets.record(1); + immutableDataBasedBuckets.record(10); + immutableDataBasedBuckets.downscale( + 2); // scale of zero is easy to reason with without a calculator + + DoubleBase2ExponentialHistogramBuckets reusableDataBasedBuckets = + newBuckets(MemoryMode.REUSABLE_DATA); + reusableDataBasedBuckets.record(0.5); + reusableDataBasedBuckets.record(1); + reusableDataBasedBuckets.record(10); + reusableDataBasedBuckets.downscale( + 2); // scale of zero is easy to reason with without a calculator + + assertThat(immutableDataBasedBuckets.getScale()).isEqualTo(reusableDataBasedBuckets.getScale()); + assertThat(immutableDataBasedBuckets.getTotalCount()) + .isEqualTo(reusableDataBasedBuckets.getTotalCount()); + assertThat(immutableDataBasedBuckets.getBucketCounts()) + .isEqualTo(reusableDataBasedBuckets.getBucketCounts()); + assertThat(immutableDataBasedBuckets.getOffset()) + .isEqualTo(reusableDataBasedBuckets.getOffset()); + + immutableDataBasedBuckets.downscale( + 3); // scale of zero is easy to reason with without a calculator + reusableDataBasedBuckets.downscale( + 3); // scale of zero is easy to reason with without a calculator + + assertThat(immutableDataBasedBuckets.getScale()).isEqualTo(reusableDataBasedBuckets.getScale()); + assertThat(immutableDataBasedBuckets.getTotalCount()) + .isEqualTo(reusableDataBasedBuckets.getTotalCount()); + assertThat(immutableDataBasedBuckets.getBucketCounts()) + .isEqualTo(reusableDataBasedBuckets.getBucketCounts()); + assertThat(immutableDataBasedBuckets.getOffset()) + .isEqualTo(reusableDataBasedBuckets.getOffset()); + } + + private static DoubleBase2ExponentialHistogramBuckets newBuckets(MemoryMode memoryMode) { + return new DoubleBase2ExponentialHistogramBuckets(20, 160, memoryMode); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregatorTest.java index 440e70b3970..f10787c7197 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregatorTest.java @@ -14,6 +14,7 @@ import 
io.opentelemetry.api.trace.TraceState; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.HistogramPointData; @@ -21,6 +22,7 @@ import io.opentelemetry.sdk.metrics.data.MetricDataType; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData; +import io.opentelemetry.sdk.metrics.internal.data.MutableHistogramPointData; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.resources.Resource; @@ -34,6 +36,8 @@ import java.util.stream.DoubleStream; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; @@ -51,17 +55,26 @@ class DoubleExplicitBucketHistogramAggregatorTest { InstrumentationScopeInfo.empty(); private static final MetricDescriptor METRIC_DESCRIPTOR = MetricDescriptor.create("name", "description", "unit"); - private static final DoubleExplicitBucketHistogramAggregator aggregator = - new DoubleExplicitBucketHistogramAggregator(boundaries, ExemplarReservoir::doubleNoSamples); + private DoubleExplicitBucketHistogramAggregator aggregator; - @Test - void createHandle() { + private void init(MemoryMode memoryMode) { + aggregator = + new DoubleExplicitBucketHistogramAggregator( + boundaries, ExemplarReservoir::doubleNoSamples, memoryMode); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void createHandle(MemoryMode memoryMode) { + init(memoryMode); assertThat(aggregator.createHandle()) .isInstanceOf(DoubleExplicitBucketHistogramAggregator.Handle.class); } - @Test - void testRecordings() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testRecordings(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordLong(20); @@ -84,8 +97,9 @@ void testRecordings() { Arrays.asList(1L, 1L, 1L, 1L))); } - @Test - void aggregateThenMaybeReset_WithExemplars() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset_WithExemplars(MemoryMode memoryMode) { Attributes attributes = Attributes.builder().put("test", "value").build(); DoubleExemplarData exemplar = ImmutableDoubleExemplarData.create( @@ -100,7 +114,7 @@ void aggregateThenMaybeReset_WithExemplars() { List exemplars = Collections.singletonList(exemplar); Mockito.when(reservoir.collectAndReset(Attributes.empty())).thenReturn(exemplars); DoubleExplicitBucketHistogramAggregator aggregator = - new DoubleExplicitBucketHistogramAggregator(boundaries, () -> reservoir); + new DoubleExplicitBucketHistogramAggregator(boundaries, () -> reservoir, memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordDouble(0, attributes, Context.root()); @@ -121,8 +135,10 @@ void aggregateThenMaybeReset_WithExemplars() { exemplars)); } - @Test - void aggregateThenMaybeReset() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset(MemoryMode 
memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); @@ -159,8 +175,10 @@ void aggregateThenMaybeReset() { Arrays.asList(1L, 0L, 0L, 0L))); } - @Test - void toMetricData() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void toMetricData(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordLong(10); @@ -180,8 +198,10 @@ void toMetricData() { .isEqualTo(AggregationTemporality.DELTA); } - @Test - void toMetricDataWithExemplars() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void toMetricDataWithExemplars(MemoryMode memoryMode) { + init(memoryMode); Attributes attributes = Attributes.builder().put("test", "value").build(); DoubleExemplarData exemplar = ImmutableDoubleExemplarData.create( @@ -226,8 +246,10 @@ void toMetricDataWithExemplars() { .hasExemplars(exemplar))); } - @Test - void testHistogramCounts() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testHistogramCounts(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordDouble(1.1); @@ -237,8 +259,10 @@ void testHistogramCounts() { assertThat(point.getCounts().size()).isEqualTo(boundaries.length + 1); } - @Test - void testMultithreadedUpdates() throws InterruptedException { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void testMultithreadedUpdates(MemoryMode memoryMode) throws InterruptedException { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); ImmutableList updates = ImmutableList.of(1L, 2L, 3L, 5L, 7L, 11L, 13L, 17L, 19L, 23L); @@ -278,4 +302,28 @@ void testMultithreadedUpdates() throws InterruptedException { boundariesList, Arrays.asList(50000L, 50000L, 0L, 0L))); } + + @Test + void testReusableDataMemoryMode() { + init(MemoryMode.REUSABLE_DATA); + AggregatorHandle aggregatorHandle = + aggregator.createHandle(); + aggregatorHandle.recordLong(10); + aggregatorHandle.recordLong(20); + aggregatorHandle.recordLong(30); + aggregatorHandle.recordLong(40); + + HistogramPointData pointData = + aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false); + assertThat(pointData).isExactlyInstanceOf(MutableHistogramPointData.class); + + aggregatorHandle.recordLong(10); + aggregatorHandle.recordLong(20); + + HistogramPointData anotherPointData = + aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false); + + // The point data instance should be reused + assertThat(anotherPointData).isSameAs(pointData); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregatorTest.java index 2decaf97821..b28a8e4879c 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregatorTest.java @@ -7,23 +7,28 @@ import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import 
io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoublePointData; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; +import io.opentelemetry.sdk.metrics.internal.data.MutableDoublePointData; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.resources.Resource; import java.util.Collections; import java.util.List; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; /** Unit tests for {@link AggregatorHandle}. */ class DoubleLastValueAggregatorTest { @@ -32,16 +37,24 @@ class DoubleLastValueAggregatorTest { InstrumentationScopeInfo.empty(); private static final MetricDescriptor METRIC_DESCRIPTOR = MetricDescriptor.create("name", "description", "unit"); - private static final DoubleLastValueAggregator aggregator = - new DoubleLastValueAggregator(ExemplarReservoir::doubleNoSamples); + private DoubleLastValueAggregator aggregator; - @Test - void createHandle() { + private void init(MemoryMode memoryMode) { + aggregator = new DoubleLastValueAggregator(ExemplarReservoir::doubleNoSamples, memoryMode); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void createHandle(MemoryMode memoryMode) { + init(memoryMode); assertThat(aggregator.createHandle()).isInstanceOf(DoubleLastValueAggregator.Handle.class); } - @Test - void multipleRecords() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void multipleRecords(MemoryMode memoryMode) { + init(memoryMode); + AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordDouble(12.1); @@ -59,8 +72,11 @@ void multipleRecords() { .isEqualTo(14.1); } - @Test - void aggregateThenMaybeReset() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset(MemoryMode memoryMode) { + init(memoryMode); + AggregatorHandle aggregatorHandle = aggregator.createHandle(); @@ -79,8 +95,11 @@ void aggregateThenMaybeReset() { .isEqualTo(12.1); } - @Test - void diff() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void diff(MemoryMode memoryMode) { + init(memoryMode); + Attributes attributes = Attributes.builder().put("test", "value").build(); DoubleExemplarData exemplar = ImmutableDoubleExemplarData.create( @@ -113,8 +132,102 @@ void diff() { .isEqualTo(ImmutableDoublePointData.create(0, 1, Attributes.empty(), 2, exemplars)); } - @Test - void toMetricData() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void diffInPlace(MemoryMode memoryMode) { + init(memoryMode); + + Attributes attributes = Attributes.builder().put("test", "value").build(); + DoubleExemplarData exemplar = + ImmutableDoubleExemplarData.create( + attributes, + 2L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 1); + List exemplars = Collections.singletonList(exemplar); + List previousExemplars = + Collections.singletonList( + ImmutableDoubleExemplarData.create( + attributes, + 1L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + 
TraceState.getDefault()), + 2)); + + MutableDoublePointData previous = new MutableDoublePointData(); + MutableDoublePointData current = new MutableDoublePointData(); + + previous.set(0, 1, Attributes.empty(), 1, previousExemplars); + current.set(0, 1, Attributes.empty(), 2, exemplars); + + aggregator.diffInPlace(previous, current); + + /* Assert that latest measurement is kept and set on {@code previous} */ + assertThat(previous.getStartEpochNanos()).isEqualTo(0); + assertThat(previous.getEpochNanos()).isEqualTo(1); + assertThat(previous.getAttributes()).isEqualTo(Attributes.empty()); + assertThat(previous.getValue()).isEqualTo(2); + assertThat(previous.getExemplars()).isEqualTo(exemplars); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void copyPoint(MemoryMode memoryMode) { + init(memoryMode); + + MutableDoublePointData pointData = (MutableDoublePointData) aggregator.createReusablePoint(); + + Attributes attributes = Attributes.of(AttributeKey.longKey("test"), 100L); + List examplarsFrom = + Collections.singletonList( + ImmutableDoubleExemplarData.create( + attributes, + 2L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 1)); + pointData.set(0, 1, attributes, 2000, examplarsFrom); + + MutableDoublePointData toPointData = (MutableDoublePointData) aggregator.createReusablePoint(); + + Attributes toAttributes = Attributes.of(AttributeKey.longKey("test"), 100L); + List examplarsTo = + Collections.singletonList( + ImmutableDoubleExemplarData.create( + attributes, + 4L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 2)); + toPointData.set(0, 2, toAttributes, 4000, examplarsTo); + + aggregator.copyPoint(pointData, toPointData); + + assertThat(toPointData.getStartEpochNanos()).isEqualTo(pointData.getStartEpochNanos()); + assertThat(toPointData.getEpochNanos()).isEqualTo(pointData.getEpochNanos()); + assertThat(toPointData.getAttributes()).isEqualTo(pointData.getAttributes()); + assertThat(toPointData.getValue()).isEqualTo(pointData.getValue()); + assertThat(toPointData.getExemplars()).isEqualTo(pointData.getExemplars()); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void toMetricData(MemoryMode memoryMode) { + init(memoryMode); + AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordDouble(10); @@ -142,4 +255,25 @@ void toMetricData() { .hasEpochNanos(100) .hasValue(10))); } + + @Test + void testReusableDataOnCollect() { + init(MemoryMode.REUSABLE_DATA); + AggregatorHandle handle = aggregator.createHandle(); + handle.recordDouble(1); + DoublePointData pointData = + handle.aggregateThenMaybeReset(0, 10, Attributes.empty(), /* reset= */ false); + + handle.recordDouble(1); + DoublePointData pointData2 = + handle.aggregateThenMaybeReset(0, 10, Attributes.empty(), /* reset= */ false); + + assertThat(pointData).isSameAs(pointData2); + + handle.recordDouble(1); + DoublePointData pointDataWithReset = + handle.aggregateThenMaybeReset(0, 10, Attributes.empty(), /* reset= */ true); + + assertThat(pointData).isSameAs(pointDataWithReset); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregatorTest.java index 15377186150..6b4715dd729 100644 --- 
a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregatorTest.java @@ -7,12 +7,14 @@ import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.InstrumentValueType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; @@ -21,6 +23,7 @@ import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; +import io.opentelemetry.sdk.metrics.internal.data.MutableDoublePointData; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; @@ -30,6 +33,8 @@ import java.util.List; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; @@ -45,24 +50,33 @@ class DoubleSumAggregatorTest { private static final MetricDescriptor metricDescriptor = MetricDescriptor.create("name", "description", "unit"); - private static final DoubleSumAggregator aggregator = - new DoubleSumAggregator( - InstrumentDescriptor.create( - "instrument_name", - "instrument_description", - "instrument_unit", - InstrumentType.COUNTER, - InstrumentValueType.DOUBLE, - Advice.empty()), - ExemplarReservoir::doubleNoSamples); + private DoubleSumAggregator aggregator; - @Test - void createHandle() { + private void init(MemoryMode memoryMode) { + aggregator = + new DoubleSumAggregator( + InstrumentDescriptor.create( + "instrument_name", + "instrument_description", + "instrument_unit", + InstrumentType.COUNTER, + InstrumentValueType.DOUBLE, + Advice.empty()), + ExemplarReservoir::doubleNoSamples, + memoryMode); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void createHandle(MemoryMode memoryMode) { + init(memoryMode); assertThat(aggregator.createHandle()).isInstanceOf(DoubleSumAggregator.Handle.class); } - @Test - void multipleRecords() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void multipleRecords(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordDouble(12.1); @@ -77,8 +91,10 @@ void multipleRecords() { .isEqualTo(12.1 * 5); } - @Test - void multipleRecords_WithNegatives() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void multipleRecords_WithNegatives(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordDouble(12); @@ -94,8 +110,10 @@ void multipleRecords_WithNegatives() { .isEqualTo(14); } - @Test - void aggregateThenMaybeReset() { + @ParameterizedTest 
+ @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); @@ -116,8 +134,9 @@ void aggregateThenMaybeReset() { .isEqualTo(-13); } - @Test - void aggregateThenMaybeReset_WithExemplars() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset_WithExemplars(MemoryMode memoryMode) { Attributes attributes = Attributes.builder().put("test", "value").build(); DoubleExemplarData exemplar = ImmutableDoubleExemplarData.create( @@ -140,7 +159,8 @@ void aggregateThenMaybeReset_WithExemplars() { InstrumentType.COUNTER, InstrumentValueType.DOUBLE, Advice.empty()), - () -> reservoir); + () -> reservoir, + memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordDouble(0, attributes, Context.root()); @@ -149,8 +169,9 @@ void aggregateThenMaybeReset_WithExemplars() { .isEqualTo(ImmutableDoublePointData.create(0, 1, Attributes.empty(), 0, exemplars)); } - @Test - void mergeAndDiff() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void mergeAndDiff(MemoryMode memoryMode) { Attributes attributes = Attributes.builder().put("test", "value").build(); DoubleExemplarData exemplar = ImmutableDoubleExemplarData.create( @@ -174,7 +195,8 @@ void mergeAndDiff() { instrumentType, InstrumentValueType.LONG, Advice.empty()), - ExemplarReservoir::doubleNoSamples); + ExemplarReservoir::doubleNoSamples, + memoryMode); DoublePointData diffed = aggregator.diff( @@ -190,8 +212,99 @@ void mergeAndDiff() { } } - @Test - void toMetricData() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void diffInPlace(MemoryMode memoryMode) { + init(memoryMode); + Attributes attributes = Attributes.builder().put("test", "value").build(); + DoubleExemplarData exemplar = + ImmutableDoubleExemplarData.create( + attributes, + 2L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 1); + List exemplars = Collections.singletonList(exemplar); + List previousExemplars = + Collections.singletonList( + ImmutableDoubleExemplarData.create( + attributes, + 1L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 2)); + + MutableDoublePointData previous = new MutableDoublePointData(); + MutableDoublePointData current = new MutableDoublePointData(); + + previous.set(0, 1, Attributes.empty(), 1, previousExemplars); + current.set(0, 1, Attributes.empty(), 3, exemplars); + + aggregator.diffInPlace(previous, current); + + /* Assert that latest measurement is kept and set on {@code previous} */ + assertThat(previous.getStartEpochNanos()).isEqualTo(current.getStartEpochNanos()); + assertThat(previous.getEpochNanos()).isEqualTo(current.getEpochNanos()); + assertThat(previous.getAttributes()).isEqualTo(current.getAttributes()); + assertThat(previous.getValue()).isEqualTo(2); + assertThat(previous.getExemplars()).isEqualTo(exemplars); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void copyPoint(MemoryMode memoryMode) { + init(memoryMode); + MutableDoublePointData pointData = (MutableDoublePointData) aggregator.createReusablePoint(); + + Attributes attributes = Attributes.of(AttributeKey.longKey("test"), 100L); + List examplarsFrom = + Collections.singletonList( + ImmutableDoubleExemplarData.create( + attributes, + 2L, + SpanContext.create( + "00000000000000000000000000000001", + 
"0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 1)); + pointData.set(0, 1, attributes, 2000, examplarsFrom); + + MutableDoublePointData toPointData = (MutableDoublePointData) aggregator.createReusablePoint(); + + Attributes toAttributes = Attributes.of(AttributeKey.longKey("test"), 100L); + List examplarsTo = + Collections.singletonList( + ImmutableDoubleExemplarData.create( + attributes, + 4L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 2)); + toPointData.set(0, 2, toAttributes, 4000, examplarsTo); + + aggregator.copyPoint(pointData, toPointData); + + assertThat(toPointData.getStartEpochNanos()).isEqualTo(pointData.getStartEpochNanos()); + assertThat(toPointData.getEpochNanos()).isEqualTo(pointData.getEpochNanos()); + assertThat(toPointData.getAttributes()).isEqualTo(pointData.getAttributes()); + assertThat(toPointData.getValue()).isEqualTo(pointData.getValue()); + assertThat(toPointData.getExemplars()).isEqualTo(pointData.getExemplars()); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void toMetricData(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordDouble(10); @@ -222,8 +335,10 @@ void toMetricData() { .hasValue(10))); } - @Test - void toMetricDataWithExemplars() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void toMetricDataWithExemplars(MemoryMode memoryMode) { + init(memoryMode); Attributes attributes = Attributes.builder().put("test", "value").build(); DoubleExemplarData exemplar = ImmutableDoubleExemplarData.create( @@ -247,4 +362,22 @@ void toMetricDataWithExemplars() { .hasDoubleSumSatisfying( sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar))); } + + @Test + void sameObjectReturnedOnReusableDataMemoryMode() { + init(MemoryMode.REUSABLE_DATA); + AggregatorHandle aggregatorHandle = + aggregator.createHandle(); + aggregatorHandle.recordDouble(1.0); + + DoublePointData firstCollection = + aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false); + + aggregatorHandle.recordDouble(1.0); + DoublePointData secondCollection = + aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false); + + // Should be same object since we are in REUSABLE_DATA mode. 
+ assertThat(firstCollection).isSameAs(secondCollection); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongLastValueAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongLastValueAggregatorTest.java index 7eda2d345c4..b9dfc386156 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongLastValueAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongLastValueAggregatorTest.java @@ -7,20 +7,30 @@ import static org.assertj.core.api.Assertions.assertThat; +import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.TraceFlags; +import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongPointData; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; +import io.opentelemetry.sdk.metrics.internal.data.MutableLongPointData; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.resources.Resource; import java.util.Collections; +import java.util.List; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; /** Unit tests for {@link LongLastValueAggregator}. 
*/ class LongLastValueAggregatorTest { @@ -29,16 +39,23 @@ class LongLastValueAggregatorTest { InstrumentationScopeInfo.empty(); private static final MetricDescriptor METRIC_DESCRIPTOR = MetricDescriptor.create("name", "description", "unit"); - private static final LongLastValueAggregator aggregator = - new LongLastValueAggregator(ExemplarReservoir::longNoSamples); + private LongLastValueAggregator aggregator; - @Test - void createHandle() { + private void init(MemoryMode memoryMode) { + aggregator = new LongLastValueAggregator(ExemplarReservoir::longNoSamples, memoryMode); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void createHandle(MemoryMode memoryMode) { + init(memoryMode); assertThat(aggregator.createHandle()).isInstanceOf(LongLastValueAggregator.Handle.class); } - @Test - void multipleRecords() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void multipleRecords(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordLong(12); assertThat( @@ -55,8 +72,10 @@ void multipleRecords() { .isEqualTo(14L); } - @Test - void aggregateThenMaybeReset() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordLong(13); @@ -74,8 +93,102 @@ void aggregateThenMaybeReset() { .isEqualTo(12L); } - @Test - void toMetricData() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void diffInPlace(MemoryMode memoryMode) { + init(memoryMode); + + Attributes attributes = Attributes.builder().put("test", "value").build(); + LongExemplarData exemplar = + ImmutableLongExemplarData.create( + attributes, + 2L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 1); + List exemplars = Collections.singletonList(exemplar); + List previousExemplars = + Collections.singletonList( + ImmutableLongExemplarData.create( + attributes, + 1L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 2)); + + MutableLongPointData previous = new MutableLongPointData(); + MutableLongPointData current = new MutableLongPointData(); + + previous.set(0, 1, Attributes.empty(), 1, previousExemplars); + current.set(0, 1, Attributes.empty(), 2, exemplars); + + aggregator.diffInPlace(previous, current); + + /* Assert that latest measurement is kept and set on {@code previous} */ + assertThat(previous.getStartEpochNanos()).isEqualTo(0); + assertThat(previous.getEpochNanos()).isEqualTo(1); + assertThat(previous.getAttributes()).isEqualTo(Attributes.empty()); + assertThat(previous.getValue()).isEqualTo(2); + assertThat(previous.getExemplars()).isEqualTo(exemplars); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void copyPoint(MemoryMode memoryMode) { + init(memoryMode); + + MutableLongPointData pointData = (MutableLongPointData) aggregator.createReusablePoint(); + + Attributes attributes = Attributes.of(AttributeKey.longKey("test"), 100L); + List examplarsFrom = + Collections.singletonList( + ImmutableLongExemplarData.create( + attributes, + 2L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 1)); + pointData.set(0, 1, attributes, 2000, examplarsFrom); + + MutableLongPointData toPointData = 
(MutableLongPointData) aggregator.createReusablePoint(); + + Attributes toAttributes = Attributes.of(AttributeKey.longKey("test"), 100L); + List examplarsTo = + Collections.singletonList( + ImmutableLongExemplarData.create( + attributes, + 4L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 2)); + toPointData.set(0, 2, toAttributes, 4000, examplarsTo); + + aggregator.copyPoint(pointData, toPointData); + + assertThat(toPointData.getStartEpochNanos()).isEqualTo(pointData.getStartEpochNanos()); + assertThat(toPointData.getEpochNanos()).isEqualTo(pointData.getEpochNanos()); + assertThat(toPointData.getAttributes()).isEqualTo(pointData.getAttributes()); + assertThat(toPointData.getValue()).isEqualTo(pointData.getValue()); + assertThat(toPointData.getExemplars()).isEqualTo(pointData.getExemplars()); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void toMetricData(MemoryMode memoryMode) { + init(memoryMode); + AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordLong(10); @@ -100,4 +213,24 @@ void toMetricData() { Collections.singletonList( ImmutableLongPointData.create(2, 100, Attributes.empty(), 10))))); } + + @Test + void testReusablePointOnCollect() { + init(MemoryMode.REUSABLE_DATA); + AggregatorHandle handle = aggregator.createHandle(); + handle.recordLong(1); + LongPointData pointData = + handle.aggregateThenMaybeReset(0, 10, Attributes.empty(), /* reset= */ false); + + handle.recordLong(1); + LongPointData pointData2 = + handle.aggregateThenMaybeReset(0, 10, Attributes.empty(), /* reset= */ false); + + assertThat(pointData).isSameAs(pointData2); + + LongPointData pointDataWithReset = + handle.aggregateThenMaybeReset(0, 10, Attributes.empty(), /* reset= */ true); + + assertThat(pointData).isSameAs(pointDataWithReset); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregatorTest.java index d6f0f189834..5ec6f4be28d 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregatorTest.java @@ -7,12 +7,14 @@ import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.InstrumentValueType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; @@ -21,6 +23,7 @@ import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; +import io.opentelemetry.sdk.metrics.internal.data.MutableLongPointData; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; @@ 
-30,6 +33,8 @@ import java.util.List; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; @@ -44,24 +49,33 @@ class LongSumAggregatorTest { private static final InstrumentationScopeInfo library = InstrumentationScopeInfo.empty(); private static final MetricDescriptor metricDescriptor = MetricDescriptor.create("name", "description", "unit"); - private static final LongSumAggregator aggregator = - new LongSumAggregator( - InstrumentDescriptor.create( - "instrument_name", - "instrument_description", - "instrument_unit", - InstrumentType.COUNTER, - InstrumentValueType.LONG, - Advice.empty()), - ExemplarReservoir::longNoSamples); + private LongSumAggregator aggregator; - @Test - void createHandle() { + private void init(MemoryMode memoryMode) { + aggregator = + new LongSumAggregator( + InstrumentDescriptor.create( + "instrument_name", + "instrument_description", + "instrument_unit", + InstrumentType.COUNTER, + InstrumentValueType.LONG, + Advice.empty()), + ExemplarReservoir::longNoSamples, + memoryMode); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void createHandle(MemoryMode memoryMode) { + init(memoryMode); assertThat(aggregator.createHandle()).isInstanceOf(LongSumAggregator.Handle.class); } - @Test - void multipleRecords() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void multipleRecords(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12); @@ -75,8 +89,10 @@ void multipleRecords() { .isEqualTo(12 * 5); } - @Test - void multipleRecords_WithNegatives() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void multipleRecords_WithNegatives(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12); @@ -91,8 +107,10 @@ void multipleRecords_WithNegatives() { .isEqualTo(14); } - @Test - void aggregateThenMaybeReset() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordLong(13); @@ -112,8 +130,9 @@ void aggregateThenMaybeReset() { .isEqualTo(-13); } - @Test - void aggregateThenMaybeReset_WithExemplars() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void aggregateThenMaybeReset_WithExemplars(MemoryMode memoryMode) { Attributes attributes = Attributes.builder().put("test", "value").build(); LongExemplarData exemplar = ImmutableLongExemplarData.create( @@ -136,7 +155,8 @@ void aggregateThenMaybeReset_WithExemplars() { InstrumentType.COUNTER, InstrumentValueType.LONG, Advice.empty()), - () -> reservoir); + () -> reservoir, + memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); aggregatorHandle.recordLong(0, attributes, Context.root()); assertThat( @@ -144,8 +164,9 @@ void aggregateThenMaybeReset_WithExemplars() { .isEqualTo(ImmutableLongPointData.create(0, 1, Attributes.empty(), 0, exemplars)); } - @Test - void mergeAndDiff() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void mergeAndDiff(MemoryMode memoryMode) { LongExemplarData exemplar = ImmutableLongExemplarData.create( Attributes.empty(), 
@@ -168,7 +189,8 @@ void mergeAndDiff() { instrumentType, InstrumentValueType.LONG, Advice.empty()), - ExemplarReservoir::longNoSamples); + ExemplarReservoir::longNoSamples, + memoryMode); LongPointData diffed = aggregator.diff( @@ -184,9 +206,99 @@ void mergeAndDiff() { } } - @Test - @SuppressWarnings("unchecked") - void toMetricData() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void diffInPlace(MemoryMode memoryMode) { + init(memoryMode); + Attributes attributes = Attributes.builder().put("test", "value").build(); + LongExemplarData exemplar = + ImmutableLongExemplarData.create( + attributes, + 2L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 1); + List exemplars = Collections.singletonList(exemplar); + List previousExemplars = + Collections.singletonList( + ImmutableLongExemplarData.create( + attributes, + 1L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 2)); + + MutableLongPointData previous = new MutableLongPointData(); + MutableLongPointData current = new MutableLongPointData(); + + previous.set(0, 1, Attributes.empty(), 1, previousExemplars); + current.set(0, 1, Attributes.empty(), 3, exemplars); + + aggregator.diffInPlace(previous, current); + + /* Assert that latest measurement is kept and set on {@code previous} */ + assertThat(previous.getStartEpochNanos()).isEqualTo(current.getStartEpochNanos()); + assertThat(previous.getEpochNanos()).isEqualTo(current.getEpochNanos()); + assertThat(previous.getAttributes()).isEqualTo(current.getAttributes()); + assertThat(previous.getValue()).isEqualTo(2); + assertThat(previous.getExemplars()).isEqualTo(current.getExemplars()); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void copyPoint(MemoryMode memoryMode) { + init(memoryMode); + MutableLongPointData pointData = (MutableLongPointData) aggregator.createReusablePoint(); + + Attributes attributes = Attributes.of(AttributeKey.longKey("test"), 100L); + List examplarsFrom = + Collections.singletonList( + ImmutableLongExemplarData.create( + attributes, + 2L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 1)); + pointData.set(0, 1, attributes, 2000, examplarsFrom); + + MutableLongPointData toPointData = (MutableLongPointData) aggregator.createReusablePoint(); + + Attributes toAttributes = Attributes.of(AttributeKey.longKey("test"), 100L); + List examplarsTo = + Collections.singletonList( + ImmutableLongExemplarData.create( + attributes, + 4L, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 2)); + toPointData.set(0, 2, toAttributes, 4000, examplarsTo); + + aggregator.copyPoint(pointData, toPointData); + + assertThat(toPointData.getStartEpochNanos()).isEqualTo(pointData.getStartEpochNanos()); + assertThat(toPointData.getEpochNanos()).isEqualTo(pointData.getEpochNanos()); + assertThat(toPointData.getAttributes()).isEqualTo(pointData.getAttributes()); + assertThat(toPointData.getValue()).isEqualTo(pointData.getValue()); + assertThat(toPointData.getExemplars()).isEqualTo(pointData.getExemplars()); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void toMetricData(MemoryMode memoryMode) { + init(memoryMode); AggregatorHandle aggregatorHandle = aggregator.createHandle(); 
aggregatorHandle.recordLong(10); @@ -216,8 +328,10 @@ void toMetricData() { .hasValue(10))); } - @Test - void toMetricDataWithExemplars() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void toMetricDataWithExemplars(MemoryMode memoryMode) { + init(memoryMode); Attributes attributes = Attributes.builder().put("test", "value").build(); LongExemplarData exemplar = ImmutableLongExemplarData.create( @@ -242,4 +356,21 @@ void toMetricDataWithExemplars() { .hasLongSumSatisfying( sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar))); } + + @Test + void sameObjectReturnedOnReusableDataMemoryMode() { + init(MemoryMode.REUSABLE_DATA); + AggregatorHandle aggregatorHandle = aggregator.createHandle(); + + aggregatorHandle.recordLong(1L); + LongPointData firstCollection = + aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false); + + aggregatorHandle.recordLong(1L); + LongPointData secondCollection = + aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false); + + // Should be same object since we are in REUSABLE_DATA mode. + assertThat(firstCollection).isSameAs(secondCollection); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramBucketsTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramBucketsTest.java new file mode 100644 index 00000000000..067f34a54c7 --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramBucketsTest.java @@ -0,0 +1,46 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; +import org.junit.jupiter.api.Test; + +class MutableExponentialHistogramBucketsTest { + + @Test + void testSanity() { + MutableExponentialHistogramBuckets buckets = new MutableExponentialHistogramBuckets(); + assertThat(buckets.getScale()).isEqualTo(0); + assertThat(buckets.getOffset()).isEqualTo(0); + assertThat(buckets.getTotalCount()).isEqualTo(0); + assertThat(buckets.getBucketCounts()).isEmpty(); + assertThat(buckets.getReusableBucketCountsList()).isEmpty(); + + DynamicPrimitiveLongList bucketCounts = DynamicPrimitiveLongList.of(1, 2, 3); + buckets.set(1, 2, 3, bucketCounts); + + assertThat(buckets.getScale()).isEqualTo(1); + assertThat(buckets.getOffset()).isEqualTo(2); + assertThat(buckets.getTotalCount()).isEqualTo(3); + assertThat(buckets.getBucketCounts()).containsExactly(1L, 2L, 3L); + assertThat(buckets.getReusableBucketCountsList()).containsExactly(1L, 2L, 3L); + + assertThat(buckets.toString()) + .isEqualTo( + "MutableExponentialHistogramBuckets{scale=1, offset=2, bucketCounts=[1, 2, 3], totalCount=3}"); + + MutableExponentialHistogramBuckets sameBuckets = new MutableExponentialHistogramBuckets(); + sameBuckets.set(1, 2, 3, DynamicPrimitiveLongList.of(1, 2, 3)); + assertThat(sameBuckets).isEqualTo(buckets); + assertThat(sameBuckets.hashCode()).isEqualTo(buckets.hashCode()); + + sameBuckets.set(1, 2, 3, DynamicPrimitiveLongList.of(1, 20, 3)); + assertThat(sameBuckets).isNotEqualTo(buckets); + assertThat(sameBuckets.hashCode()).isNotEqualTo(buckets.hashCode()); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramPointDataTest.java 
b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramPointDataTest.java new file mode 100644 index 00000000000..0cd56aa6f72 --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/data/MutableExponentialHistogramPointDataTest.java @@ -0,0 +1,114 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList; +import java.util.Collections; +import org.junit.jupiter.api.Test; + +class MutableExponentialHistogramPointDataTest { + + @Test + public void testSanity() { + MutableExponentialHistogramPointData pointData = new MutableExponentialHistogramPointData(); + assertThat(pointData.getSum()).isEqualTo(0); + assertThat(pointData.getCount()).isEqualTo(0); + assertThat(pointData.getPositiveBuckets().getTotalCount()).isEqualTo(0); + assertThat(pointData.getNegativeBuckets().getTotalCount()).isEqualTo(0); + assertThat(pointData.getExemplars()).isEmpty(); + + MutableExponentialHistogramBuckets positiveBuckets = new MutableExponentialHistogramBuckets(); + positiveBuckets.set( + /* scale= */ 1, /* offset= */ 2, /* totalCount= */ 3, DynamicPrimitiveLongList.of(1, 2, 3)); + MutableExponentialHistogramBuckets negativeBuckets = new MutableExponentialHistogramBuckets(); + negativeBuckets.set(10, 20, 30, DynamicPrimitiveLongList.of(50, 60, 70)); + + pointData.set( + /* scale= */ 1, + /* sum= */ 2, + /* zeroCount= */ 10, + /* hasMin= */ true, + /* min= */ 100, + /* hasMax= */ true, + /* max= */ 1000, + positiveBuckets, + negativeBuckets, + /* startEpochNanos= */ 10, + /* epochNanos= */ 20, + Attributes.of(AttributeKey.stringKey("foo"), "bar"), + Collections.emptyList()); + + assertThat(pointData.getSum()).isEqualTo(2); + assertThat(pointData.getCount()).isEqualTo(10 + 30 + 3); + assertThat(pointData.getAttributes().get(AttributeKey.stringKey("foo"))).isEqualTo("bar"); + assertThat(pointData.getAttributes().size()).isEqualTo(1); + assertThat(pointData.getScale()).isEqualTo(1); + assertThat(pointData.getZeroCount()).isEqualTo(10); + assertThat(pointData.hasMin()).isTrue(); + assertThat(pointData.getMin()).isEqualTo(100); + assertThat(pointData.hasMax()).isTrue(); + assertThat(pointData.getMax()).isEqualTo(1000); + assertThat(pointData.getPositiveBuckets().getTotalCount()).isEqualTo(3); + assertThat(pointData.getNegativeBuckets().getTotalCount()).isEqualTo(30); + assertThat(pointData.getPositiveBuckets().getBucketCounts()).containsExactly(1L, 2L, 3L); + assertThat(pointData.getNegativeBuckets().getBucketCounts()).containsExactly(50L, 60L, 70L); + assertThat(pointData.getStartEpochNanos()).isEqualTo(10); + assertThat(pointData.getEpochNanos()).isEqualTo(20); + assertThat(pointData.getExemplars()).isEmpty(); + + assertThat(pointData.toString()) + .isEqualTo( + "MutableExponentialHistogramPointData{startEpochNanos=10, epochNanos=20, " + + "attributes={foo=\"bar\"}, scale=1, sum=2.0, count=43, zeroCount=10, hasMin=true, " + + "min=100.0, hasMax=true, max=1000.0, " + + "positiveBuckets=MutableExponentialHistogramBuckets{scale=1, offset=2, " + + "bucketCounts=[1, 2, 3], totalCount=3}, " + + "negativeBuckets=MutableExponentialHistogramBuckets{scale=10, offset=20, " + + "bucketCounts=[50, 60, 70], totalCount=30}, exemplars=[]}"); + + 
MutableExponentialHistogramPointData samePointData = new MutableExponentialHistogramPointData(); + samePointData.set( + /* scale= */ 1, + /* sum= */ 2, + /* zeroCount= */ 10, + /* hasMin= */ true, + /* min= */ 100, + /* hasMax= */ true, + /* max= */ 1000, + positiveBuckets, + negativeBuckets, + /* startEpochNanos= */ 10, + /* epochNanos= */ 20, + Attributes.of(AttributeKey.stringKey("foo"), "bar"), + Collections.emptyList()); + assertThat(samePointData).isEqualTo(pointData); + assertThat(samePointData.hashCode()).isEqualTo(pointData.hashCode()); + + MutableExponentialHistogramPointData differentPointData = + new MutableExponentialHistogramPointData(); + differentPointData.set( + /* scale= */ 1, + /* sum= */ 2, + /* zeroCount= */ 10000000, + /* hasMin= */ true, + /* min= */ 100, + /* hasMax= */ true, + /* max= */ 1000, + positiveBuckets, + negativeBuckets, + /* startEpochNanos= */ 10, + /* epochNanos= */ 20, + Attributes.of(AttributeKey.stringKey("foo"), "bar"), + Collections.emptyList()); + + assertThat(differentPointData).isNotEqualTo(pointData); + assertThat(differentPointData.hashCode()).isNotEqualTo(pointData.hashCode()); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/data/MutableHistogramPointDataTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/data/MutableHistogramPointDataTest.java new file mode 100644 index 00000000000..1a8e4d1e0b1 --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/data/MutableHistogramPointDataTest.java @@ -0,0 +1,165 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.data; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import java.util.Arrays; +import java.util.Collections; +import org.junit.jupiter.api.Test; + +public class MutableHistogramPointDataTest { + + @Test + void testSanity() { + MutableHistogramPointData pointData = new MutableHistogramPointData(10); + assertThat(pointData.getSum()).isEqualTo(0); + assertThat(pointData.getCount()).isEqualTo(0); + assertThat(pointData.getBoundaries()).isEmpty(); + assertThat(pointData.getCounts().size()).isEqualTo(10); + assertThat(pointData.getExemplars()).isEmpty(); + + pointData.set( + /* startEpochNanos= */ 10, + /* epochNanos= */ 20, + Attributes.of(AttributeKey.stringKey("foo"), "bar"), + /* sum= */ 2, + /* hasMin= */ true, + /* min= */ 100, + /* hasMax= */ true, + /* max= */ 1000, + /* boundaries= */ Arrays.asList(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0), + /* counts= */ new long[] {10, 20, 30, 40, 50, 60, 70, 80, 90, 100}, + Collections.emptyList()); + + assertThat(pointData.getSum()).isEqualTo(2); + assertThat(pointData.getCount()).isEqualTo(10 + 20 + 30 + 40 + 50 + 60 + 70 + 80 + 90 + 100); + assertThat(pointData.getAttributes().get(AttributeKey.stringKey("foo"))).isEqualTo("bar"); + assertThat(pointData.getAttributes().size()).isEqualTo(1); + assertThat(pointData.getBoundaries()) + .containsExactly(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0); + assertThat(pointData.getCounts().toArray()) + .containsExactly(10L, 20L, 30L, 40L, 50L, 60L, 70L, 80L, 90L, 100L); + assertThat(pointData.getStartEpochNanos()).isEqualTo(10); + assertThat(pointData.getEpochNanos()).isEqualTo(20); + + assertThat(pointData.hasMin()).isTrue(); + 
assertThat(pointData.getMin()).isEqualTo(100); + assertThat(pointData.hasMax()).isTrue(); + assertThat(pointData.getMax()).isEqualTo(1000); + assertThat(pointData.getExemplars()).isEmpty(); + assertThat(pointData.toString()) + .isEqualTo( + "MutableHistogramPointData{startEpochNanos=10, " + + "epochNanos=20, " + + "attributes={foo=\"bar\"}, " + + "sum=2.0, " + + "count=550, " + + "hasMin=true, " + + "min=100.0, " + + "hasMax=true, " + + "max=1000.0, " + + "boundaries=[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0], " + + "counts=[10, 20, 30, 40, 50, 60, 70, 80, 90, 100], " + + "exemplars=[]}"); + + MutableHistogramPointData anotherPointData = new MutableHistogramPointData(10); + // Same values + anotherPointData.set( + /* startEpochNanos= */ 10, + /* epochNanos= */ 20, + Attributes.of(AttributeKey.stringKey("foo"), "bar"), + /* sum= */ 2, + /* hasMin= */ true, + /* min= */ 100, + /* hasMax= */ true, + /* max= */ 1000, + /* boundaries= */ Arrays.asList(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0), + /* counts= */ new long[] {10, 20, 30, 40, 50, 60, 70, 80, 90, 100}, + Collections.emptyList()); + assertThat(anotherPointData).isEqualTo(pointData); + assertThat(anotherPointData.hashCode()).isEqualTo(pointData.hashCode()); + + // Same values but different sum + anotherPointData.set( + /* startEpochNanos= */ 10, + /* epochNanos= */ 20, + Attributes.of(AttributeKey.stringKey("foo"), "bar"), + /* sum= */ 20000, + /* hasMin= */ true, + /* min= */ 100, + /* hasMax= */ true, + /* max= */ 1000, + /* boundaries= */ Arrays.asList(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0), + /* counts= */ new long[] {10, 20, 30, 40, 50, 60, 70, 80, 90, 100}, + Collections.emptyList()); + assertThat(anotherPointData).isNotEqualTo(pointData); + assertThat(anotherPointData.hashCode()).isNotEqualTo(pointData.hashCode()); + } + + @Test() + void testBoundaries() { + MutableHistogramPointData pointData = new MutableHistogramPointData(10); + assertThatThrownBy( + () -> + pointData.set( + /* startEpochNanos= */ 10, + /* epochNanos= */ 20, + Attributes.of(AttributeKey.stringKey("foo"), "bar"), + /* sum= */ 2, + /* hasMin= */ true, + /* min= */ 100, + /* hasMax= */ true, + /* max= */ 1000, + /* boundaries= */ Arrays.asList(1.0, 2.0, 3.0, 4.0), + /* counts= */ new long[] {10, 20, 30, 40, 50, 60, 70, 80, 90, 100}, + Collections.emptyList())) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("invalid boundaries: size should be 9 but was 4"); + + assertThatThrownBy( + () -> + pointData.set( + /* startEpochNanos= */ 10, + /* epochNanos= */ 20, + Attributes.of(AttributeKey.stringKey("foo"), "bar"), + /* sum= */ 2, + /* hasMin= */ true, + /* min= */ 100, + /* hasMax= */ true, + /* max= */ 1000, + /* boundaries= */ Arrays.asList( + 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, Double.POSITIVE_INFINITY), + /* counts= */ new long[] {10, 20, 30, 40, 50, 60, 70, 80, 90, 100}, + Collections.emptyList())) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("invalid boundaries: contains explicit +/-Inf"); + } + + @Test + void testCounts() { + MutableHistogramPointData pointData = new MutableHistogramPointData(10); + assertThatThrownBy( + () -> + pointData.set( + /* startEpochNanos= */ 10, + /* epochNanos= */ 20, + Attributes.of(AttributeKey.stringKey("foo"), "bar"), + /* sum= */ 2, + /* hasMin= */ true, + /* min= */ 100, + /* hasMax= */ true, + /* max= */ 1000, + /* boundaries= */ Arrays.asList(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0), + /* counts= */ new long[] {10, 20, 30, 40, 50, 60}, + Collections.emptyList())) + 
.isInstanceOf(IllegalArgumentException.class) + .hasMessage("invalid counts: size should be 10 but was 6"); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/DoubleRandomFixedSizeExemplarReservoirTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/DoubleRandomFixedSizeExemplarReservoirTest.java index 769c2e2b5ce..85a53fbeb0d 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/DoubleRandomFixedSizeExemplarReservoirTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/DoubleRandomFixedSizeExemplarReservoirTest.java @@ -114,7 +114,7 @@ public void multiMeasurements_preservesLatestSamples() { @Override public int nextInt(int max) { switch (max) { - // Force one sample in bucket 1 and two in bucket 0. + // Force one sample in bucket 1 and two in bucket 0. case 2: return 1; default: diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongRandomFixedSizeExemplarReservoirTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongRandomFixedSizeExemplarReservoirTest.java index ed9f1ea759e..fc754576401 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongRandomFixedSizeExemplarReservoirTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongRandomFixedSizeExemplarReservoirTest.java @@ -114,7 +114,7 @@ public void multiMeasurements_preservesLatestSamples() { @Override public int nextInt(int max) { switch (max) { - // Force one sample in bucket 1 and two in bucket 0. + // Force one sample in bucket 1 and two in bucket 0. case 2: return 1; default: diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongToDoubleExemplarReservoirTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongToDoubleExemplarReservoirTest.java new file mode 100644 index 00000000000..fc7a5a3c07a --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongToDoubleExemplarReservoirTest.java @@ -0,0 +1,39 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.exemplar; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.context.Context; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class LongToDoubleExemplarReservoirTest { + @Mock ExemplarReservoir delegate; + + @Test + void offerDoubleMeasurement() { + ExemplarReservoir filtered = new LongToDoubleExemplarReservoir<>(delegate); + filtered.offerDoubleMeasurement(1.0, Attributes.empty(), Context.root()); + verify(delegate).offerDoubleMeasurement(1.0, Attributes.empty(), Context.root()); + verify(delegate, never()).offerLongMeasurement(anyLong(), any(), any()); + } + + @Test + void offerLongMeasurement() { + ExemplarReservoir filtered = new LongToDoubleExemplarReservoir<>(delegate); + filtered.offerLongMeasurement(1L, Attributes.empty(), Context.root()); + verify(delegate).offerDoubleMeasurement(1.0, Attributes.empty(), Context.root()); + verify(delegate, 
never()).offerLongMeasurement(anyLong(), any(), any()); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/export/RegisteredReaderTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/export/RegisteredReaderTest.java index 36e727c861e..e151876e6a9 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/export/RegisteredReaderTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/export/RegisteredReaderTest.java @@ -24,7 +24,6 @@ void create_UniqueIdentity() { RegisteredReader registeredReader1 = RegisteredReader.create(reader, ViewRegistry.create()); RegisteredReader registeredReader2 = RegisteredReader.create(reader, ViewRegistry.create()); - assertThat(registeredReader1).isEqualTo(registeredReader1); assertThat(registeredReader1).isNotEqualTo(registeredReader2); assertThat(registeredReader1.hashCode()).isEqualTo(registeredReader1.hashCode()); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ArrayBasedStackTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ArrayBasedStackTest.java new file mode 100644 index 00000000000..e8cbda1764c --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ArrayBasedStackTest.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; + +import org.junit.jupiter.api.Test; + +class ArrayBasedStackTest { + + @Test + void testPushAndPop() { + ArrayBasedStack stack = new ArrayBasedStack<>(); + stack.push(1); + stack.push(2); + assertThat(stack.pop()).isEqualTo(2); + assertThat(stack.pop()).isEqualTo(1); + } + + @Test + void testIsEmpty() { + ArrayBasedStack stack = new ArrayBasedStack<>(); + assertThat(stack.isEmpty()).isTrue(); + stack.push(1); + assertThat(stack.isEmpty()).isFalse(); + } + + @Test + void testSize() { + ArrayBasedStack stack = new ArrayBasedStack<>(); + assertThat(stack.size()).isEqualTo(0); + stack.push(1); + assertThat(stack.size()).isEqualTo(1); + } + + @Test + void testPushBeyondInitialCapacity() { + ArrayBasedStack stack = new ArrayBasedStack<>(); + for (int i = 0; i < ArrayBasedStack.DEFAULT_CAPACITY + 5; i++) { + stack.push(i); + } + assertThat(stack.size()).isEqualTo(ArrayBasedStack.DEFAULT_CAPACITY + 5); + for (int i = ArrayBasedStack.DEFAULT_CAPACITY + 4; i >= 0; i--) { + assertThat(stack.pop()).isEqualTo(i); + } + } + + @Test + void testPopOnEmptyStack() { + ArrayBasedStack stack = new ArrayBasedStack<>(); + assertThat(stack.pop()).isNull(); + } + + @Test + void testPushNullElement() { + ArrayBasedStack stack = new ArrayBasedStack<>(); + assertThatThrownBy(() -> stack.push(null)).isInstanceOf(NullPointerException.class); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorageTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorageTest.java index fb1d147e4c5..23c45a9349f 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorageTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorageTest.java @@ -5,8 +5,9 @@ package io.opentelemetry.sdk.metrics.internal.state; -import 
static io.opentelemetry.sdk.metrics.internal.state.Measurement.doubleMeasurement; -import static io.opentelemetry.sdk.metrics.internal.state.Measurement.longMeasurement; +import static io.opentelemetry.sdk.common.export.MemoryMode.REUSABLE_DATA; +import static io.opentelemetry.sdk.metrics.internal.state.ImmutableMeasurement.createDouble; +import static io.opentelemetry.sdk.metrics.internal.state.ImmutableMeasurement.createLong; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.mockito.ArgumentMatchers.any; @@ -16,12 +17,16 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.InstrumentSelector; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.InstrumentValueType; import io.opentelemetry.sdk.metrics.View; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.export.MetricReader; +import io.opentelemetry.sdk.metrics.internal.data.MutableLongPointData; import io.opentelemetry.sdk.metrics.internal.debug.SourceInfo; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; @@ -31,10 +36,12 @@ import io.opentelemetry.sdk.metrics.internal.view.ViewRegistry; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.time.TestClock; -import org.junit.jupiter.api.BeforeEach; +import java.util.Collection; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @@ -65,9 +72,10 @@ class AsynchronousMetricStorageTest { private AsynchronousMetricStorage longCounterStorage; private AsynchronousMetricStorage doubleCounterStorage; - @BeforeEach - void setup() { + // Not using @BeforeEach since many methods require executing them for each MemoryMode + void setup(MemoryMode memoryMode) { when(reader.getAggregationTemporality(any())).thenReturn(AggregationTemporality.CUMULATIVE); + when(reader.getMemoryMode()).thenReturn(memoryMode); registeredReader = RegisteredReader.create(reader, ViewRegistry.create()); longCounterStorage = @@ -94,14 +102,14 @@ void setup() { Advice.empty())); } - @Test - void recordLong() { - longCounterStorage.record( - longMeasurement(0, 1, 1, Attributes.builder().put("key", "a").build())); - longCounterStorage.record( - longMeasurement(0, 1, 2, Attributes.builder().put("key", "b").build())); - longCounterStorage.record( - longMeasurement(0, 1, 3, Attributes.builder().put("key", "c").build())); + @ParameterizedTest + @EnumSource(MemoryMode.class) + void recordLong(MemoryMode memoryMode) { + setup(memoryMode); + + longCounterStorage.record(createLong(0, 1, 1, Attributes.builder().put("key", "a").build())); + longCounterStorage.record(createLong(0, 1, 2, Attributes.builder().put("key", "b").build())); + longCounterStorage.record(createLong(0, 1, 3, 
Attributes.builder().put("key", "c").build())); assertThat(longCounterStorage.collect(resource, scope, 0, testClock.nanoTime())) .satisfies( @@ -119,14 +127,17 @@ void recordLong() { assertThat(logs.size()).isEqualTo(0); } - @Test - void recordDouble() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void recordDouble(MemoryMode memoryMode) { + setup(memoryMode); + doubleCounterStorage.record( - doubleMeasurement(0, 1, 1.1, Attributes.builder().put("key", "a").build())); + createDouble(0, 1, 1.1, Attributes.builder().put("key", "a").build())); doubleCounterStorage.record( - doubleMeasurement(0, 1, 2.2, Attributes.builder().put("key", "b").build())); + createDouble(0, 1, 2.2, Attributes.builder().put("key", "b").build())); doubleCounterStorage.record( - doubleMeasurement(0, 1, 3.3, Attributes.builder().put("key", "c").build())); + createDouble(0, 1, 3.3, Attributes.builder().put("key", "c").build())); assertThat(doubleCounterStorage.collect(resource, scope, 0, testClock.nanoTime())) .satisfies( @@ -146,8 +157,11 @@ void recordDouble() { assertThat(logs.size()).isEqualTo(0); } - @Test - void record_ProcessesAttributes() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void record_ProcessesAttributes(MemoryMode memoryMode) { + setup(memoryMode); + AsynchronousMetricStorage storage = AsynchronousMetricStorage.create( registeredReader, @@ -166,7 +180,7 @@ void record_ProcessesAttributes() { Advice.empty())); storage.record( - longMeasurement(0, 1, 1, Attributes.builder().put("key1", "a").put("key2", "b").build())); + createLong(0, 1, 1, Attributes.builder().put("key1", "a").put("key2", "b").build())); assertThat(storage.collect(resource, scope, 0, testClock.nanoTime())) .satisfies( @@ -180,11 +194,14 @@ void record_ProcessesAttributes() { assertThat(logs.size()).isEqualTo(0); } - @Test - void record_MaxCardinality() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void record_MaxCardinality(MemoryMode memoryMode) { + setup(memoryMode); + for (int i = 0; i <= CARDINALITY_LIMIT + 1; i++) { longCounterStorage.record( - longMeasurement(0, 1, 1, Attributes.builder().put("key" + i, "val").build())); + createLong(0, 1, 1, Attributes.builder().put("key" + i, "val").build())); } assertThat(longCounterStorage.collect(resource, scope, 0, testClock.nanoTime())) @@ -194,12 +211,13 @@ void record_MaxCardinality() { logs.assertContains("Instrument long-counter has exceeded the maximum allowed cardinality"); } - @Test - void record_DuplicateAttributes() { - longCounterStorage.record( - longMeasurement(0, 1, 1, Attributes.builder().put("key1", "a").build())); - longCounterStorage.record( - longMeasurement(0, 1, 2, Attributes.builder().put("key1", "a").build())); + @ParameterizedTest + @EnumSource(MemoryMode.class) + void record_DuplicateAttributes(MemoryMode memoryMode) { + setup(memoryMode); + + longCounterStorage.record(createLong(0, 1, 1, Attributes.builder().put("key1", "a").build())); + longCounterStorage.record(createLong(0, 1, 2, Attributes.builder().put("key1", "a").build())); assertThat(longCounterStorage.collect(resource, scope, 0, testClock.nanoTime())) .satisfies( @@ -214,10 +232,13 @@ void record_DuplicateAttributes() { "Instrument long-counter has recorded multiple values for the same attributes: {key1=\"a\"}"); } - @Test - void collect_CumulativeReportsCumulativeObservations() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void collect_CumulativeReportsCumulativeObservations(MemoryMode memoryMode) { + setup(memoryMode); + // Record measurement and collect at time 
10 - longCounterStorage.record(longMeasurement(0, 10, 3, Attributes.empty())); + longCounterStorage.record(createLong(0, 10, 3, Attributes.empty())); assertThat(longCounterStorage.collect(resource, scope, 0, 0)) .hasLongSumSatisfying( sum -> @@ -232,9 +253,9 @@ void collect_CumulativeReportsCumulativeObservations() { registeredReader.setLastCollectEpochNanos(10); // Record measurements and collect at time 30 - longCounterStorage.record(longMeasurement(0, 30, 3, Attributes.empty())); + longCounterStorage.record(createLong(0, 30, 3, Attributes.empty())); longCounterStorage.record( - longMeasurement(0, 30, 6, Attributes.builder().put("key", "value1").build())); + createLong(0, 30, 6, Attributes.builder().put("key", "value1").build())); assertThat(longCounterStorage.collect(resource, scope, 0, 0)) .hasLongSumSatisfying( sum -> @@ -255,9 +276,9 @@ void collect_CumulativeReportsCumulativeObservations() { registeredReader.setLastCollectEpochNanos(30); // Record measurement and collect at time 35 - longCounterStorage.record(longMeasurement(0, 35, 4, Attributes.empty())); + longCounterStorage.record(createLong(0, 35, 4, Attributes.empty())); longCounterStorage.record( - longMeasurement(0, 35, 5, Attributes.builder().put("key", "value2").build())); + createLong(0, 35, 5, Attributes.builder().put("key", "value2").build())); assertThat(longCounterStorage.collect(resource, scope, 0, 0)) .hasLongSumSatisfying( sum -> @@ -277,8 +298,11 @@ void collect_CumulativeReportsCumulativeObservations() { .hasAttributes(Attributes.builder().put("key", "value2").build()))); } - @Test - void collect_DeltaComputesDiff() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void collect_DeltaComputesDiff(MemoryMode memoryMode) { + setup(memoryMode); + when(reader.getAggregationTemporality(any())).thenReturn(AggregationTemporality.DELTA); longCounterStorage = AsynchronousMetricStorage.create( @@ -293,7 +317,7 @@ void collect_DeltaComputesDiff() { Advice.empty())); // Record measurement and collect at time 10 - longCounterStorage.record(longMeasurement(0, 10, 3, Attributes.empty())); + longCounterStorage.record(createLong(0, 10, 3, Attributes.empty())); assertThat(longCounterStorage.collect(resource, scope, 0, 0)) .hasLongSumSatisfying( sum -> @@ -308,9 +332,9 @@ void collect_DeltaComputesDiff() { registeredReader.setLastCollectEpochNanos(10); // Record measurement and collect at time 30 - longCounterStorage.record(longMeasurement(0, 30, 3, Attributes.empty())); + longCounterStorage.record(createLong(0, 30, 3, Attributes.empty())); longCounterStorage.record( - longMeasurement(0, 30, 6, Attributes.builder().put("key", "value1").build())); + createLong(0, 30, 6, Attributes.builder().put("key", "value1").build())); assertThat(longCounterStorage.collect(resource, scope, 0, 0)) .hasLongSumSatisfying( sum -> @@ -331,9 +355,9 @@ void collect_DeltaComputesDiff() { registeredReader.setLastCollectEpochNanos(30); // Record measurement and collect at time 35 - longCounterStorage.record(longMeasurement(0, 35, 4, Attributes.empty())); + longCounterStorage.record(createLong(0, 35, 4, Attributes.empty())); longCounterStorage.record( - longMeasurement(0, 35, 5, Attributes.builder().put("key", "value2").build())); + createLong(0, 35, 5, Attributes.builder().put("key", "value2").build())); assertThat(longCounterStorage.collect(resource, scope, 0, 0)) .hasLongSumSatisfying( sum -> @@ -352,4 +376,54 @@ void collect_DeltaComputesDiff() { .hasValue(5) .hasAttributes(Attributes.builder().put("key", "value2").build()))); } + + @Test + void 
collect_reusableData_reusedObjectsAreReturnedOnSecondCall() { + setup(REUSABLE_DATA); + + longCounterStorage.record(createLong(0, 1, 1, Attributes.builder().put("key", "a").build())); + longCounterStorage.record(createLong(0, 1, 2, Attributes.builder().put("key", "b").build())); + longCounterStorage.record(createLong(0, 1, 3, Attributes.builder().put("key", "c").build())); + + MetricData firstCollectMetricData = + longCounterStorage.collect(resource, scope, 0, testClock.nanoTime()); + assertThat(firstCollectMetricData) + .satisfies( + metricData -> + assertThat(metricData) + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(1) + .hasAttributes(attributeEntry("key", "a")) + .isInstanceOf(MutableLongPointData.class), + point -> + point + .hasValue(2) + .hasAttributes(attributeEntry("key", "b")) + .isInstanceOf(MutableLongPointData.class), + point -> + point + .hasValue(3) + .hasAttributes(attributeEntry("key", "c")) + .isInstanceOf(MutableLongPointData.class)))); + + MetricData secondCollectMetricData = + longCounterStorage.collect(resource, scope, 0, testClock.nanoTime()); + + Collection secondCollectPoints = + secondCollectMetricData.getData().getPoints(); + Collection firstCollectionPoints = + firstCollectMetricData.getData().getPoints(); + assertThat(secondCollectPoints).hasSameSizeAs(firstCollectionPoints); + + // Show that second returned objects have been used in first collect response as well + // which proves there is reuse. + for (PointData firstCollectionPoint : firstCollectionPoints) { + assertThat(secondCollectPoints) + .anySatisfy(point -> assertThat(point).isSameAs(firstCollectionPoint)); + } + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/CallbackRegistrationTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/CallbackRegistrationTest.java index a15f8e4affd..a8b424dfcd5 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/CallbackRegistrationTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/CallbackRegistrationTest.java @@ -5,8 +5,8 @@ package io.opentelemetry.sdk.metrics.internal.state; -import static io.opentelemetry.sdk.metrics.internal.state.Measurement.doubleMeasurement; -import static io.opentelemetry.sdk.metrics.internal.state.Measurement.longMeasurement; +import static io.opentelemetry.sdk.metrics.internal.state.ImmutableMeasurement.createDouble; +import static io.opentelemetry.sdk.metrics.internal.state.ImmutableMeasurement.createLong; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.ArgumentMatchers.any; @@ -19,6 +19,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.InstrumentValueType; import io.opentelemetry.sdk.metrics.export.MetricReader; @@ -76,6 +77,7 @@ class CallbackRegistrationTest { @BeforeEach void setup() { + when(reader.getMemoryMode()).thenReturn(MemoryMode.IMMUTABLE_DATA); registeredReader = RegisteredReader.create(reader, ViewRegistry.create()); when(storage1.getRegisteredReader()).thenReturn(registeredReader); when(storage2.getRegisteredReader()).thenReturn(registeredReader); @@ -145,7 +147,7 @@ void 
invokeCallback_Double() { assertThat(counter.get()).isEqualTo(1.1); verify(storage1) - .record(doubleMeasurement(0, 1, 1.1, Attributes.builder().put("key", "val").build())); + .record(createDouble(0, 1, 1.1, Attributes.builder().put("key", "val").build())); verify(storage2, never()).record(any()); verify(storage3, never()).record(any()); } @@ -164,10 +166,8 @@ void invokeCallback_Long() { assertThat(counter.get()).isEqualTo(1); verify(storage1, never()).record(any()); - verify(storage2) - .record(longMeasurement(0, 1, 1, Attributes.builder().put("key", "val").build())); - verify(storage3) - .record(longMeasurement(0, 1, 1, Attributes.builder().put("key", "val").build())); + verify(storage2).record(createLong(0, 1, 1, Attributes.builder().put("key", "val").build())); + verify(storage3).record(createLong(0, 1, 1, Attributes.builder().put("key", "val").build())); } @Test @@ -189,11 +189,9 @@ void invokeCallback_MultipleMeasurements() { assertThat(doubleCounter.get()).isEqualTo(1.1); assertThat(longCounter.get()).isEqualTo(1); verify(storage1) - .record(doubleMeasurement(0, 1, 1.1, Attributes.builder().put("key", "val").build())); - verify(storage2) - .record(longMeasurement(0, 1, 1, Attributes.builder().put("key", "val").build())); - verify(storage3) - .record(longMeasurement(0, 1, 1, Attributes.builder().put("key", "val").build())); + .record(createDouble(0, 1, 1.1, Attributes.builder().put("key", "val").build())); + verify(storage2).record(createLong(0, 1, 1, Attributes.builder().put("key", "val").build())); + verify(storage3).record(createLong(0, 1, 1, Attributes.builder().put("key", "val").build())); } @Test diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/MetricStorageRegistryTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/MetricStorageRegistryTest.java index 8b664ad5d3e..99a26106a78 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/MetricStorageRegistryTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/MetricStorageRegistryTest.java @@ -114,5 +114,10 @@ public void recordLong(long value, Attributes attributes, Context context) {} @Override public void recordDouble(double value, Attributes attributes, Context context) {} + + @Override + public boolean isEnabled() { + return true; + } } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ObjectPoolTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ObjectPoolTest.java new file mode 100644 index 00000000000..ee90d303783 --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/ObjectPoolTest.java @@ -0,0 +1,62 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Supplier; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class ObjectPoolTest { + private ObjectPool objectPool; + + @BeforeEach + void setUp() { + Supplier supplier = StringBuilder::new; + objectPool = new ObjectPool<>(supplier); + } + + @Test + void testBorrowObjectWhenPoolIsEmpty() { + StringBuilder result = objectPool.borrowObject(); + assertThat(result.toString()).isEmpty(); + } + + @Test + void testReturnAndBorrowMultipleObjects() { + // Borrow three objects + 
StringBuilder borrowed1 = objectPool.borrowObject(); + StringBuilder borrowed2 = objectPool.borrowObject(); + StringBuilder borrowed3 = objectPool.borrowObject(); + + // Modify and return the borrowed objects + borrowed1.append("pooledObject1"); + objectPool.returnObject(borrowed1); + borrowed2.append("pooledObject2"); + objectPool.returnObject(borrowed2); + borrowed3.append("pooledObject3"); + objectPool.returnObject(borrowed3); + + // Borrow three objects, which should be the same ones we just returned + StringBuilder result1 = objectPool.borrowObject(); + StringBuilder result2 = objectPool.borrowObject(); + StringBuilder result3 = objectPool.borrowObject(); + + // Verify the results using AssertJ assertions and reference comparison + List originalObjects = Arrays.asList(borrowed1, borrowed2, borrowed3); + List borrowedObjects = Arrays.asList(result1, result2, result3); + + assertThat(originalObjects).hasSize(3); + assertThat(borrowedObjects).hasSize(3); + + for (StringBuilder original : originalObjects) { + assertThat(borrowedObjects).anySatisfy(borrowed -> assertThat(borrowed).isSameAs(original)); + } + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/PooledHashMapTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/PooledHashMapTest.java new file mode 100644 index 00000000000..0e4a5ab0328 --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/PooledHashMapTest.java @@ -0,0 +1,76 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class PooledHashMapTest { + + private PooledHashMap map; + + @BeforeEach + void setup() { + map = new PooledHashMap<>(); + } + + @Test + void putAndGetTest() { + map.put("One", 1); + assertThat(map.get("One")).isEqualTo(1); + } + + @Test + void removeTest() { + map.put("One", 1); + map.remove("One"); + assertThat(map.get("One")).isNull(); + } + + @Test + void sizeTest() { + map.put("One", 1); + map.put("Two", 2); + assertThat(map.size()).isEqualTo(2); + } + + @Test + void isEmptyTest() { + assertThat(map.isEmpty()).isTrue(); + map.put("One", 1); + assertThat(map.isEmpty()).isFalse(); + } + + @Test + void containsKeyTest() { + map.put("One", 1); + assertThat(map.containsKey("One")).isTrue(); + assertThat(map.containsKey("Two")).isFalse(); + } + + @Test + void clearTest() { + map.put("One", 1); + map.put("Two", 2); + map.clear(); + assertThat(map.isEmpty()).isTrue(); + } + + @Test + void forEachTest() { + map.put("One", 1); + map.put("Two", 2); + + Map actualMap = new HashMap<>(); + map.forEach(actualMap::put); + + assertThat(actualMap).containsOnlyKeys("One", "Two").containsValues(1, 2); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SdkObservableMeasurementTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SdkObservableMeasurementTest.java new file mode 100644 index 00000000000..aaef2c841fe --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SdkObservableMeasurementTest.java @@ -0,0 +1,197 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.state; + +import static 
io.opentelemetry.sdk.metrics.data.AggregationTemporality.CUMULATIVE; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import io.github.netmikey.logunit.api.LogCapturer; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.InstrumentValueType; +import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.export.RegisteredReader; +import io.opentelemetry.sdk.metrics.internal.view.ViewRegistry; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; +import java.util.Arrays; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockito.ArgumentCaptor; +import org.slf4j.event.Level; + +@SuppressWarnings("rawtypes") +class SdkObservableMeasurementTest { + + @RegisterExtension + final LogCapturer logs = + LogCapturer.create().captureForLogger(SdkObservableMeasurement.class.getName(), Level.DEBUG); + + private AsynchronousMetricStorage mockAsyncStorage1; + private RegisteredReader registeredReader1; + private SdkObservableMeasurement sdkObservableMeasurement; + private ArgumentCaptor measurementArgumentCaptor; + + @SuppressWarnings("unchecked") + private void setup(MemoryMode memoryMode) { + InstrumentationScopeInfo instrumentationScopeInfo = + InstrumentationScopeInfo.builder("test-scope").build(); + InstrumentDescriptor instrumentDescriptor = + InstrumentDescriptor.create( + "testCounter", + "an instrument for testing purposes", + "ms", + InstrumentType.COUNTER, + InstrumentValueType.LONG, + Advice.empty()); + + InMemoryMetricReader reader1 = + InMemoryMetricReader.builder() + .setAggregationTemporalitySelector(instrumentType -> CUMULATIVE) + .setMemoryMode(memoryMode) + .build(); + registeredReader1 = RegisteredReader.create(reader1, ViewRegistry.create()); + + InMemoryMetricReader reader2 = InMemoryMetricReader.builder().setMemoryMode(memoryMode).build(); + RegisteredReader registeredReader2 = RegisteredReader.create(reader2, ViewRegistry.create()); + + measurementArgumentCaptor = ArgumentCaptor.forClass(Measurement.class); + mockAsyncStorage1 = mock(AsynchronousMetricStorage.class); + when(mockAsyncStorage1.getRegisteredReader()).thenReturn(registeredReader1); + AsynchronousMetricStorage mockAsyncStorage2 = mock(AsynchronousMetricStorage.class); + when(mockAsyncStorage2.getRegisteredReader()).thenReturn(registeredReader2); + + sdkObservableMeasurement = + SdkObservableMeasurement.create( + instrumentationScopeInfo, + instrumentDescriptor, + Arrays.asList(mockAsyncStorage1, mockAsyncStorage2)); + } + + @Test + void recordLong_ImmutableData() { + setup(MemoryMode.IMMUTABLE_DATA); + + sdkObservableMeasurement.setActiveReader(registeredReader1, 0, 10); + + try { + sdkObservableMeasurement.record(5); + + verify(mockAsyncStorage1).record(measurementArgumentCaptor.capture()); + Measurement passedMeasurement = measurementArgumentCaptor.getValue(); + assertThat(passedMeasurement).isInstanceOf(ImmutableMeasurement.class); + 
assertThat(passedMeasurement.longValue()).isEqualTo(5); + assertThat(passedMeasurement.startEpochNanos()).isEqualTo(0); + assertThat(passedMeasurement.epochNanos()).isEqualTo(10); + } finally { + sdkObservableMeasurement.unsetActiveReader(); + } + } + + @Test + void recordDouble_ImmutableData() { + setup(MemoryMode.IMMUTABLE_DATA); + + sdkObservableMeasurement.setActiveReader(registeredReader1, 0, 10); + + try { + sdkObservableMeasurement.record(4.3); + + verify(mockAsyncStorage1).record(measurementArgumentCaptor.capture()); + Measurement passedMeasurement = measurementArgumentCaptor.getValue(); + assertThat(passedMeasurement).isInstanceOf(ImmutableMeasurement.class); + assertThat(passedMeasurement.doubleValue()).isEqualTo(4.3); + assertThat(passedMeasurement.startEpochNanos()).isEqualTo(0); + assertThat(passedMeasurement.epochNanos()).isEqualTo(10); + } finally { + sdkObservableMeasurement.unsetActiveReader(); + } + } + + @Test + void recordDouble_ReusableData() { + setup(MemoryMode.REUSABLE_DATA); + + sdkObservableMeasurement.setActiveReader(registeredReader1, 0, 10); + + try { + sdkObservableMeasurement.record(4.3); + + verify(mockAsyncStorage1).record(measurementArgumentCaptor.capture()); + Measurement firstMeasurement = measurementArgumentCaptor.getValue(); + assertThat(firstMeasurement).isInstanceOf(MutableMeasurement.class); + assertThat(firstMeasurement.doubleValue()).isEqualTo(4.3); + assertThat(firstMeasurement.startEpochNanos()).isEqualTo(0); + assertThat(firstMeasurement.epochNanos()).isEqualTo(10); + + sdkObservableMeasurement.record(5.3); + + verify(mockAsyncStorage1, times(2)).record(measurementArgumentCaptor.capture()); + Measurement secondMeasurement = measurementArgumentCaptor.getValue(); + assertThat(secondMeasurement).isInstanceOf(MutableMeasurement.class); + assertThat(secondMeasurement.doubleValue()).isEqualTo(5.3); + assertThat(secondMeasurement.startEpochNanos()).isEqualTo(0); + assertThat(secondMeasurement.epochNanos()).isEqualTo(10); + + // LeasedMeasurement should be re-used + assertThat(secondMeasurement).isSameAs(firstMeasurement); + } finally { + sdkObservableMeasurement.unsetActiveReader(); + } + } + + @Test + void recordLong_ReusableData() { + setup(MemoryMode.REUSABLE_DATA); + + sdkObservableMeasurement.setActiveReader(registeredReader1, 0, 10); + + try { + sdkObservableMeasurement.record(2); + + verify(mockAsyncStorage1).record(measurementArgumentCaptor.capture()); + Measurement firstMeasurement = measurementArgumentCaptor.getValue(); + assertThat(firstMeasurement).isInstanceOf(MutableMeasurement.class); + assertThat(firstMeasurement.longValue()).isEqualTo(2); + assertThat(firstMeasurement.startEpochNanos()).isEqualTo(0); + assertThat(firstMeasurement.epochNanos()).isEqualTo(10); + + sdkObservableMeasurement.record(6); + + verify(mockAsyncStorage1, times(2)).record(measurementArgumentCaptor.capture()); + Measurement secondMeasurement = measurementArgumentCaptor.getValue(); + assertThat(secondMeasurement).isInstanceOf(MutableMeasurement.class); + assertThat(secondMeasurement.longValue()).isEqualTo(6); + assertThat(secondMeasurement.startEpochNanos()).isEqualTo(0); + assertThat(secondMeasurement.epochNanos()).isEqualTo(10); + + // LeasedMeasurement should be re-used + assertThat(secondMeasurement).isSameAs(firstMeasurement); + } finally { + sdkObservableMeasurement.unsetActiveReader(); + } + } + + @Test + @SuppressLogger(SdkObservableMeasurement.class) + void recordDouble_NaN() { + setup(MemoryMode.REUSABLE_DATA); + 
sdkObservableMeasurement.setActiveReader(registeredReader1, 0, 10); + sdkObservableMeasurement.record(Double.NaN); + + verify(mockAsyncStorage1, never()).record(any()); + logs.assertContains( + "Instrument testCounter has recorded measurement Not-a-Number (NaN) value with attributes {}. Dropping measurement."); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorageTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorageTest.java index 96f4d6274b7..0117af20b59 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorageTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorageTest.java @@ -5,26 +5,35 @@ package io.opentelemetry.sdk.metrics.internal.state; +import static io.opentelemetry.sdk.common.export.MemoryMode.IMMUTABLE_DATA; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import com.google.common.util.concurrent.AtomicDouble; +import com.google.common.util.concurrent.Uninterruptibles; import io.github.netmikey.logunit.api.LogCapturer; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.context.Context; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.InstrumentValueType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongPointData; import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; +import io.opentelemetry.sdk.metrics.internal.aggregator.EmptyMetricData; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; @@ -33,10 +42,23 @@ import io.opentelemetry.sdk.metrics.internal.view.AttributesProcessor; import io.opentelemetry.sdk.metrics.internal.view.ViewRegistry; import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.assertj.DoubleSumAssert; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.function.BiConsumer; +import java.util.stream.Stream; +import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import 
org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.MethodSource; +import org.slf4j.event.Level; @SuppressLogger(DefaultSynchronousMetricStorage.class) public class SynchronousMetricStorageTest { @@ -56,21 +78,61 @@ public class SynchronousMetricStorageTest { private static final int CARDINALITY_LIMIT = 25; @RegisterExtension - LogCapturer logs = LogCapturer.create().captureForType(DefaultSynchronousMetricStorage.class); + LogCapturer logs = + LogCapturer.create().captureForType(DefaultSynchronousMetricStorage.class, Level.DEBUG); - private final RegisteredReader deltaReader = - RegisteredReader.create(InMemoryMetricReader.createDelta(), ViewRegistry.create()); - private final RegisteredReader cumulativeReader = - RegisteredReader.create(InMemoryMetricReader.create(), ViewRegistry.create()); + private RegisteredReader deltaReader; + private RegisteredReader cumulativeReader; private final TestClock testClock = TestClock.create(); - private final Aggregator aggregator = - spy( - ((AggregatorFactory) Aggregation.sum()) - .createAggregator(DESCRIPTOR, ExemplarFilter.alwaysOff())); + private Aggregator aggregator; private final AttributesProcessor attributesProcessor = AttributesProcessor.noop(); - @Test - void attributesProcessor_applied() { + private void initialize(MemoryMode memoryMode) { + deltaReader = + RegisteredReader.create( + InMemoryMetricReader.builder() + .setAggregationTemporalitySelector(unused -> AggregationTemporality.DELTA) + .setMemoryMode(memoryMode) + .build(), + ViewRegistry.create()); + + cumulativeReader = + RegisteredReader.create( + InMemoryMetricReader.builder().setMemoryMode(memoryMode).build(), + ViewRegistry.create()); + + aggregator = + spy( + ((AggregatorFactory) Aggregation.sum()) + .createAggregator(DESCRIPTOR, ExemplarFilter.alwaysOff(), memoryMode)); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void recordDouble_NaN(MemoryMode memoryMode) { + initialize(memoryMode); + DefaultSynchronousMetricStorage storage = + new DefaultSynchronousMetricStorage<>( + cumulativeReader, + METRIC_DESCRIPTOR, + aggregator, + attributesProcessor, + CARDINALITY_LIMIT); + + storage.recordDouble(Double.NaN, Attributes.empty(), Context.current()); + + logs.assertContains( + "Instrument name has recorded measurement Not-a-Number (NaN) value with attributes {}. 
Dropping measurement."); + verify(aggregator, never()).createHandle(); + assertThat(storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 10)) + .isEqualTo(EmptyMetricData.getInstance()); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void attributesProcessor_applied(MemoryMode memoryMode) { + initialize(memoryMode); + Attributes attributes = Attributes.builder().put("K", "V").build(); AttributesProcessor attributesProcessor = AttributesProcessor.append(Attributes.builder().put("modifiedK", "modifiedV").build()); @@ -93,8 +155,11 @@ void attributesProcessor_applied() { attributeEntry("K", "V"), attributeEntry("modifiedK", "modifiedV")))); } - @Test - void recordAndCollect_CumulativeDoesNotReset() { + @ParameterizedTest + @EnumSource(MemoryMode.class) + void recordAndCollect_CumulativeDoesNotReset(MemoryMode memoryMode) { + initialize(memoryMode); + DefaultSynchronousMetricStorage storage = new DefaultSynchronousMetricStorage<>( cumulativeReader, @@ -140,7 +205,9 @@ void recordAndCollect_CumulativeDoesNotReset() { } @Test - void recordAndCollect_DeltaResets() { + void recordAndCollect_DeltaResets_ImmutableData() { + initialize(IMMUTABLE_DATA); + DefaultSynchronousMetricStorage storage = new DefaultSynchronousMetricStorage<>( deltaReader, METRIC_DESCRIPTOR, aggregator, attributesProcessor, CARDINALITY_LIMIT); @@ -187,7 +254,107 @@ void recordAndCollect_DeltaResets() { } @Test - void recordAndCollect_CumulativeAtLimit() { + void recordAndCollect_DeltaResets_ReusableData() { + initialize(MemoryMode.REUSABLE_DATA); + + DefaultSynchronousMetricStorage storage = + new DefaultSynchronousMetricStorage<>( + deltaReader, METRIC_DESCRIPTOR, aggregator, attributesProcessor, CARDINALITY_LIMIT); + + // Record measurement and collect at time 10 + storage.recordDouble(3, Attributes.empty(), Context.current()); + verify(aggregator, times(1)).createHandle(); + assertThat(storage.getAggregatorHandlePool()).hasSize(0); + assertThat(storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 10)) + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3))); + assertThat(storage.getAggregatorHandlePool()).hasSize(0); + + deltaReader.setLastCollectEpochNanos(10); + + // Record measurement and collect at time 30 + storage.recordDouble(3, Attributes.empty(), Context.current()); + + // We're switched to secondary map so a handle will be created + verify(aggregator, times(2)).createHandle(); + assertThat(storage.getAggregatorHandlePool()).hasSize(0); + assertThat(storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 30)) + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(10).hasEpochNanos(30).hasValue(3))); + assertThat(storage.getAggregatorHandlePool()).hasSize(0); + + deltaReader.setLastCollectEpochNanos(30); + + // Record measurements and collect at time 35 + storage.recordDouble(2, Attributes.empty(), Context.current()); + storage.recordDouble(4, Attributes.of(AttributeKey.stringKey("foo"), "bar"), Context.current()); + + // We don't delete aggregator handles unless max cardinality reached, hence + // aggregator handle is still there, thus no handle was created for empty(), but it will for + // the "foo" + verify(aggregator, times(3)).createHandle(); + assertThat(storage.getAggregatorHandlePool()).hasSize(0); + + MetricData metricData = storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 35); + 
assertThat(metricData).hasDoubleSumSatisfying(DoubleSumAssert::isDelta); + assertThat(metricData) + .hasDoubleSumSatisfying( + sum -> + sum.satisfies( + sumData -> + assertThat(sumData.getPoints()) + .hasSize(2) + .anySatisfy( + point -> { + assertThat(point.getStartEpochNanos()).isEqualTo(30); + assertThat(point.getEpochNanos()).isEqualTo(35); + assertThat(point.getValue()).isEqualTo(2); + assertThat(point.getAttributes()).isEqualTo(Attributes.empty()); + }) + .anySatisfy( + point -> { + assertThat(point.getStartEpochNanos()).isEqualTo(30); + assertThat(point.getEpochNanos()).isEqualTo(35); + assertThat(point.getValue()).isEqualTo(4); + assertThat(point.getAttributes()) + .isEqualTo( + Attributes.of(AttributeKey.stringKey("foo"), "bar")); + }))); + + assertThat(storage.getAggregatorHandlePool()).hasSize(0); + + deltaReader.setLastCollectEpochNanos(40); + storage.recordDouble(6, Attributes.of(AttributeKey.stringKey("foo"), "bar"), Context.current()); + + assertThat(storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 45)) + .hasDoubleSumSatisfying( + sum -> + sum.satisfies( + sumData -> + assertThat(sumData.getPoints()) + .hasSize(1) + .allSatisfy( + point -> { + assertThat(point.getStartEpochNanos()).isEqualTo(40); + assertThat(point.getEpochNanos()).isEqualTo(45); + assertThat(point.getValue()).isEqualTo(6); + assertThat(point.getAttributes()) + .isEqualTo( + Attributes.of(AttributeKey.stringKey("foo"), "bar")); + }))); + } + + @ParameterizedTest + @EnumSource(MemoryMode.class) + void recordAndCollect_CumulativeAtLimit(MemoryMode memoryMode) { + initialize(memoryMode); + DefaultSynchronousMetricStorage storage = new DefaultSynchronousMetricStorage<>( cumulativeReader, @@ -256,7 +423,9 @@ void recordAndCollect_CumulativeAtLimit() { } @Test - void recordAndCollect_DeltaAtLimit() { + void recordAndCollect_DeltaAtLimit_ImmutableDataMemoryMode() { + initialize(IMMUTABLE_DATA); + DefaultSynchronousMetricStorage storage = new DefaultSynchronousMetricStorage<>( deltaReader, METRIC_DESCRIPTOR, aggregator, attributesProcessor, CARDINALITY_LIMIT); @@ -283,6 +452,7 @@ void recordAndCollect_DeltaAtLimit() { assertThat(point.getValue()).isEqualTo(3); }))); assertThat(storage.getAggregatorHandlePool()).hasSize(CARDINALITY_LIMIT - 1); + assertThat(logs.getEvents()).isEmpty(); deltaReader.setLastCollectEpochNanos(10); @@ -347,4 +517,322 @@ void recordAndCollect_DeltaAtLimit() { assertThat(storage.getAggregatorHandlePool()).hasSize(CARDINALITY_LIMIT); logs.assertContains("Instrument name has exceeded the maximum allowed cardinality"); } + + @Test + void recordAndCollect_DeltaAtLimit_ReusableDataMemoryMode() { + initialize(MemoryMode.REUSABLE_DATA); + + DefaultSynchronousMetricStorage storage = + new DefaultSynchronousMetricStorage<>( + deltaReader, METRIC_DESCRIPTOR, aggregator, attributesProcessor, CARDINALITY_LIMIT); + + // Record measurements for CARDINALITY_LIMIT - 1, since 1 slot is reserved for the overflow + // series + for (int i = 0; i < CARDINALITY_LIMIT - 1; i++) { + storage.recordDouble( + 3, Attributes.builder().put("key", "value" + i).build(), Context.current()); + } + verify(aggregator, times(CARDINALITY_LIMIT - 1)).createHandle(); + + // First collect + MetricData metricData = storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 10); + + assertThat(metricData) + .hasDoubleSumSatisfying( + sum -> + sum.satisfies( + sumData -> + Assertions.assertThat(sumData.getPoints()) + .hasSize(CARDINALITY_LIMIT - 1) + .allSatisfy( + point -> { + 
Assertions.assertThat(point.getStartEpochNanos()).isEqualTo(0); + Assertions.assertThat(point.getEpochNanos()).isEqualTo(10); + Assertions.assertThat(point.getValue()).isEqualTo(3); + }))); + + assertThat(logs.getEvents()).isEmpty(); + + deltaReader.setLastCollectEpochNanos(10); + + // Record CARDINALITY_LIMIT measurements, causing one measurement to exceed the cardinality + // limit and fall into the overflow series + for (int i = 0; i < CARDINALITY_LIMIT; i++) { + storage.recordDouble( + 3, Attributes.builder().put("key", "value" + i).build(), Context.current()); + } + + // After first collection, we expect the secondary map which is empty to be used, + // hence handle creation will still take place + // The +1 is for the overflow handle + verify(aggregator, times((CARDINALITY_LIMIT - 1) * 2 + 1)).createHandle(); + + // Second collect + metricData = storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 20); + + assertThat(metricData) + .hasDoubleSumSatisfying( + sum -> + sum.satisfies( + sumData -> + assertThat(sumData.getPoints()) + .hasSize(CARDINALITY_LIMIT) + .allSatisfy( + point -> { + assertThat(point.getStartEpochNanos()).isEqualTo(10); + assertThat(point.getEpochNanos()).isEqualTo(20); + assertThat(point.getValue()).isEqualTo(3); + }) + .noneMatch( + point -> + ("value" + CARDINALITY_LIMIT) + .equals( + point + .getAttributes() + .get(AttributeKey.stringKey("key")))) + .satisfiesOnlyOnce( + point -> + assertThat(point.getAttributes()) + .isEqualTo(MetricStorage.CARDINALITY_OVERFLOW)))); + + assertThat(storage.getAggregatorHandlePool()).isEmpty(); + + logs.assertContains("Instrument name has exceeded the maximum allowed cardinality"); + } + + @Test + void recordAndCollect_DeltaAtLimit_ReusableDataMemoryMode_ExpireUnused() { + initialize(MemoryMode.REUSABLE_DATA); + + DefaultSynchronousMetricStorage storage = + new DefaultSynchronousMetricStorage<>( + deltaReader, METRIC_DESCRIPTOR, aggregator, attributesProcessor, CARDINALITY_LIMIT); + + // 1st recording: Recording goes to active map + for (int i = 0; i < CARDINALITY_LIMIT - 1; i++) { + storage.recordDouble( + 3, Attributes.builder().put("key", "value" + i).build(), Context.current()); + } + + // This will switch next recordings to the secondary map (which is empty) + // by making it the active map + storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 10); + + // 2nd recording + deltaReader.setLastCollectEpochNanos(10); + for (int i = 0; i < CARDINALITY_LIMIT - 1; i++) { + storage.recordDouble( + 3, Attributes.builder().put("key", "value" + i).build(), Context.current()); + } + + // This switches maps again, so next recordings will be to the first map + storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 10, 20); + + // 3rd recording: We're recording unseen attributes to a map we know is full, + // since it was filled during 1st recording + deltaReader.setLastCollectEpochNanos(20); + for (int i = CARDINALITY_LIMIT - 1; i < (CARDINALITY_LIMIT - 1) + 15; i++) { + storage.recordDouble( + 3, Attributes.builder().put("key", "value" + i).build(), Context.current()); + } + + MetricData metricData = storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 20, 30); + + assertOnlyOverflowWasRecorded(metricData, 20, 30, 15 * 3); + + // 4th recording: We're recording unseen attributes to a map we know is full, + // since it was filled during *2nd* recording + deltaReader.setLastCollectEpochNanos(30); + for (int i = CARDINALITY_LIMIT - 1; i < (CARDINALITY_LIMIT - 1) + 15; i++) { + storage.recordDouble( + 3, 
Attributes.builder().put("key", "value" + i).build(), Context.current()); + } + + metricData = storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 30, 40); + + assertOnlyOverflowWasRecorded(metricData, 30, 40, 15 * 3); + + // 5th recording: Map should be empty, since all handlers were removed due to + // no recording being done to them + deltaReader.setLastCollectEpochNanos(40); + for (int i = 0; i < 10; i++) { + storage.recordDouble( + 3, Attributes.builder().put("key", "value" + i).build(), Context.current()); + } + + metricData = storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 40, 50); + + assertNumberOfPoints(metricData, 10); + assertAllPointsWithValue(metricData, 40, 50, 3); + assertOverflowDoesNotExists(metricData); + + // 6th recording: Map should be empty (we switched to secondary map), since all handlers + // were removed due to no recordings being done to them + deltaReader.setLastCollectEpochNanos(50); + for (int i = 0; i < 12; i++) { + storage.recordDouble( + 4, Attributes.builder().put("key", "value" + i).build(), Context.current()); + } + + metricData = storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 50, 60); + + assertNumberOfPoints(metricData, 12); + assertAllPointsWithValue(metricData, 50, 60, 4); + assertOverflowDoesNotExists(metricData); + } + + @SuppressWarnings("SameParameterValue") + private static void assertOnlyOverflowWasRecorded( + MetricData metricData, long startTime, long endTime, double value) { + + assertThat(metricData) + .hasDoubleSumSatisfying( + sum -> + sum.satisfies( + sumData -> + assertThat(sumData.getPoints()) + .hasSize(1) + .allSatisfy( + point -> { + assertThat(point.getStartEpochNanos()).isEqualTo(startTime); + assertThat(point.getEpochNanos()).isEqualTo(endTime); + assertThat(point.getValue()).isEqualTo(value); + assertThat(point.getAttributes()) + .isEqualTo(MetricStorage.CARDINALITY_OVERFLOW); + }))); + } + + private static void assertNumberOfPoints(MetricData metricData, int numberOfPoints) { + assertThat(metricData) + .hasDoubleSumSatisfying( + sum -> + sum.satisfies(sumData -> assertThat(sumData.getPoints()).hasSize(numberOfPoints))); + } + + private static void assertAllPointsWithValue( + MetricData metricData, long startTime, long endTime, double value) { + assertThat(metricData) + .hasDoubleSumSatisfying( + sum -> + sum.satisfies( + sumData -> + assertThat(sumData.getPoints()) + .allSatisfy( + point -> { + assertThat(point.getStartEpochNanos()).isEqualTo(startTime); + assertThat(point.getEpochNanos()).isEqualTo(endTime); + assertThat(point.getValue()).isEqualTo(value); + }))); + } + + private static void assertOverflowDoesNotExists(MetricData metricData) { + assertThat(metricData) + .hasDoubleSumSatisfying( + sum -> + sum.satisfies( + sumData -> + assertThat(sumData.getPoints()) + .noneMatch( + point -> + point + .getAttributes() + .equals(MetricStorage.CARDINALITY_OVERFLOW)))); + } + + @ParameterizedTest + @MethodSource("concurrentStressTestArguments") + void recordAndCollect_concurrentStressTest( + DefaultSynchronousMetricStorage storage, BiConsumer collect) { + // Define record threads. 
Each records a value of 1.0, 2000 times + List threads = new ArrayList<>(); + CountDownLatch latch = new CountDownLatch(4); + for (int i = 0; i < 4; i++) { + Thread thread = + new Thread( + () -> { + for (int j = 0; j < 2000; j++) { + storage.recordDouble(1.0, Attributes.empty(), Context.current()); + Uninterruptibles.sleepUninterruptibly(Duration.ofMillis(1)); + } + latch.countDown(); + }); + threads.add(thread); + } + + // Define collect thread. Collect thread collects and aggregates the + AtomicDouble cumulativeSum = new AtomicDouble(); + Thread collectThread = + new Thread( + () -> { + int extraCollects = 0; + // If we terminate when latch.count() == 0, the last collect may have occurred before + // the last recorded measurement. To ensure we collect all measurements, we collect + // one extra time after latch.count() == 0. + while (latch.getCount() != 0 || extraCollects <= 1) { + Uninterruptibles.sleepUninterruptibly(Duration.ofMillis(1)); + MetricData metricData = + storage.collect(Resource.empty(), InstrumentationScopeInfo.empty(), 0, 1); + if (!metricData.isEmpty()) { + metricData.getDoubleSumData().getPoints().stream() + .findFirst() + .ifPresent(pointData -> collect.accept(pointData.getValue(), cumulativeSum)); + } + if (latch.getCount() == 0) { + extraCollects++; + } + } + }); + + // Start all the threads + collectThread.start(); + threads.forEach(Thread::start); + + // Wait for the collect thread to end, which collects until the record threads are done + Uninterruptibles.joinUninterruptibly(collectThread); + + assertThat(cumulativeSum.get()).isEqualTo(8000.0); + } + + private static Stream concurrentStressTestArguments() { + List argumentsList = new ArrayList<>(); + + for (MemoryMode memoryMode : MemoryMode.values()) { + Aggregator aggregator = + ((AggregatorFactory) Aggregation.sum()) + .createAggregator(DESCRIPTOR, ExemplarFilter.alwaysOff(), memoryMode); + + argumentsList.add( + Arguments.of( + // Delta + new DefaultSynchronousMetricStorage<>( + RegisteredReader.create( + InMemoryMetricReader.builder() + .setAggregationTemporalitySelector(unused -> AggregationTemporality.DELTA) + .setMemoryMode(memoryMode) + .build(), + ViewRegistry.create()), + METRIC_DESCRIPTOR, + aggregator, + AttributesProcessor.noop(), + CARDINALITY_LIMIT), + (BiConsumer) + (value, cumulativeCount) -> cumulativeCount.addAndGet(value))); + + argumentsList.add( + Arguments.of( + // Cumulative + new DefaultSynchronousMetricStorage<>( + RegisteredReader.create( + InMemoryMetricReader.builder().setMemoryMode(memoryMode).build(), + ViewRegistry.create()), + METRIC_DESCRIPTOR, + aggregator, + AttributesProcessor.noop(), + CARDINALITY_LIMIT), + (BiConsumer) + (value, cumulativeCount) -> cumulativeCount.set(value))); + } + + return argumentsList.stream(); + } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/AdviceAttributesProcessorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/AdviceAttributesProcessorTest.java index 294c05ea157..d809c8a3794 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/AdviceAttributesProcessorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/AdviceAttributesProcessorTest.java @@ -24,6 +24,19 @@ void doesNotUseContext() { assertThat(new AdviceAttributesProcessor(emptyList()).usesContext()).isFalse(); } + @Test + void noExtraAttributes() { + AttributesProcessor processor = + new AdviceAttributesProcessor(asList(stringKey("abc"), stringKey("def"))); + + 
Attributes result = + processor.process( + Attributes.builder().put(stringKey("abc"), "abc").put(stringKey("def"), "def").build(), + Context.root()); + + assertThat(result).containsOnly(entry(stringKey("abc"), "abc"), entry(stringKey("def"), "def")); + } + @Test void removeUnwantedAttributes() { AttributesProcessor processor = diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/Base2ExponentialHistogramAggregationTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/Base2ExponentialHistogramAggregationTest.java index f6f44e9c72c..0479baafde3 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/Base2ExponentialHistogramAggregationTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/Base2ExponentialHistogramAggregationTest.java @@ -8,6 +8,19 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.Aggregation; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.InstrumentValueType; +import io.opentelemetry.sdk.metrics.data.ExemplarData; +import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData; +import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; +import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; +import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorHandle; +import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; +import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; +import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; import org.junit.jupiter.api.Test; class Base2ExponentialHistogramAggregationTest { @@ -22,12 +35,39 @@ void goodConfig() { void invalidConfig_Throws() { assertThatThrownBy(() -> Base2ExponentialHistogramAggregation.create(0, 20)) .isInstanceOf(IllegalArgumentException.class) - .hasMessage("maxBuckets must be > 0"); - assertThatThrownBy(() -> Base2ExponentialHistogramAggregation.create(1, 21)) + .hasMessage("maxBuckets must be >= 2"); + assertThatThrownBy(() -> Base2ExponentialHistogramAggregation.create(2, 21)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("maxScale must be -10 <= x <= 20"); - assertThatThrownBy(() -> Base2ExponentialHistogramAggregation.create(1, -11)) + assertThatThrownBy(() -> Base2ExponentialHistogramAggregation.create(2, -11)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("maxScale must be -10 <= x <= 20"); } + + @Test + void minimumBucketsCanAccommodateMaxRange() { + Aggregation aggregation = Base2ExponentialHistogramAggregation.create(2, 20); + Aggregator aggregator = + ((AggregatorFactory) aggregation) + .createAggregator( + InstrumentDescriptor.create( + "foo", + "description", + "unit", + InstrumentType.HISTOGRAM, + InstrumentValueType.DOUBLE, + Advice.empty()), + ExemplarFilter.alwaysOff(), + MemoryMode.IMMUTABLE_DATA); + AggregatorHandle handle = + aggregator.createHandle(); + // Record max range + handle.recordDouble(Double.MIN_VALUE); + handle.recordDouble(Double.MAX_VALUE); + + ExponentialHistogramPointData pointData = + handle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true); + assertThat(pointData.getCount()).isEqualTo(2); + assertThat(pointData.getScale()).isEqualTo(-11); + } } diff --git 
a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/FilteredAttributesTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/FilteredAttributesTest.java new file mode 100644 index 00000000000..b8bc0d10932 --- /dev/null +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/FilteredAttributesTest.java @@ -0,0 +1,192 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics.internal.view; + +import static io.opentelemetry.api.common.AttributeKey.longKey; +import static io.opentelemetry.api.common.AttributeKey.stringKey; +import static org.assertj.core.api.Assertions.assertThat; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.testing.EqualsTester; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; +import org.junit.jupiter.api.RepeatedTest; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +/** Unit tests for {@link FilteredAttributes}s. */ +@SuppressWarnings("rawtypes") +class FilteredAttributesTest { + + private static final AttributeKey KEY1 = stringKey("key1"); + private static final AttributeKey KEY2 = stringKey("key2"); + private static final AttributeKey KEY3 = stringKey("key3"); + private static final AttributeKey KEY4 = stringKey("key4"); + private static final AttributeKey KEY2_LONG = longKey("key2"); + private static final Set> ALL_KEYS = + ImmutableSet.of(KEY1, KEY2, KEY3, KEY4, KEY2_LONG); + private static final Attributes ALL_ATTRIBUTES = + Attributes.of(KEY1, "value1", KEY2, "value2", KEY2_LONG, 222L, KEY3, "value3"); + private static final Attributes FILTERED_ATTRIBUTES_ONE = + FilteredAttributes.create(ALL_ATTRIBUTES, ImmutableSet.of(KEY1)); + private static final Attributes FILTERED_ATTRIBUTES_TWO = + FilteredAttributes.create(ALL_ATTRIBUTES, ImmutableSet.of(KEY1, KEY2_LONG)); + private static final Attributes FILTERED_ATTRIBUTES_THREE = + FilteredAttributes.create(ALL_ATTRIBUTES, ImmutableSet.of(KEY1, KEY2_LONG, KEY3)); + private static final Attributes FILTERED_ATTRIBUTES_FOUR = + FilteredAttributes.create(ALL_ATTRIBUTES, ImmutableSet.of(KEY1, KEY2_LONG, KEY3, KEY4)); + private static final Attributes FILTERED_ATTRIBUTES_EMPTY_SOURCE = + FilteredAttributes.create(Attributes.empty(), ImmutableSet.of(KEY1)); + private static final Attributes FILTERED_ATTRIBUTES_EMPTY = + FilteredAttributes.create(ALL_ATTRIBUTES, Collections.emptySet()); + + @ParameterizedTest + @MethodSource("mapArgs") + void forEach(Attributes filteredAttributes, Map, Object> expectedMapEntries) { + Map entriesSeen = new HashMap<>(); + filteredAttributes.forEach(entriesSeen::put); + assertThat(entriesSeen).isEqualTo(expectedMapEntries); + } + + @ParameterizedTest + @MethodSource("mapArgs") + void asMap(Attributes filteredAttributes, Map, Object> expectedMapEntries) { + assertThat(filteredAttributes.asMap()).isEqualTo(expectedMapEntries); + } + + @ParameterizedTest + @MethodSource("mapArgs") + void size(Attributes 
filteredAttributes, Map, Object> expectedMapEntries) { + assertThat(filteredAttributes.size()).isEqualTo(expectedMapEntries.size()); + } + + @ParameterizedTest + @MethodSource("mapArgs") + void isEmpty(Attributes filteredAttributes, Map, Object> expectedMapEntries) { + assertThat(filteredAttributes.isEmpty()).isEqualTo(expectedMapEntries.isEmpty()); + } + + @ParameterizedTest + @MethodSource("mapArgs") + void get(Attributes filteredAttributes, Map, Object> expectedMapEntries) { + for (AttributeKey key : ALL_KEYS) { + Object expectedValue = expectedMapEntries.get(key); + assertThat(filteredAttributes.get(key)).isEqualTo(expectedValue); + } + } + + @ParameterizedTest + @MethodSource("mapArgs") + void toBuilder(Attributes filteredAttributes, Map, Object> expectedMapEntries) { + Attributes attributes = filteredAttributes.toBuilder().build(); + assertThat(attributes.asMap()).isEqualTo(expectedMapEntries); + } + + private static Stream mapArgs() { + return Stream.of( + Arguments.of(FILTERED_ATTRIBUTES_ONE, ImmutableMap.of(KEY1, "value1")), + Arguments.of(FILTERED_ATTRIBUTES_TWO, ImmutableMap.of(KEY1, "value1", KEY2_LONG, 222L)), + Arguments.of( + FILTERED_ATTRIBUTES_THREE, + ImmutableMap.of(KEY1, "value1", KEY2_LONG, 222L, KEY3, "value3")), + Arguments.of( + FILTERED_ATTRIBUTES_FOUR, + ImmutableMap.of(KEY1, "value1", KEY2_LONG, 222L, KEY3, "value3")), + Arguments.of(FILTERED_ATTRIBUTES_EMPTY_SOURCE, Collections.emptyMap()), + Arguments.of(FILTERED_ATTRIBUTES_EMPTY, Collections.emptyMap())); + } + + @Test + void stringRepresentation() { + assertThat(FILTERED_ATTRIBUTES_ONE.toString()).isEqualTo("FilteredAttributes{key1=value1}"); + assertThat(FILTERED_ATTRIBUTES_TWO.toString()) + .isEqualTo("FilteredAttributes{key1=value1,key2=222}"); + assertThat(FILTERED_ATTRIBUTES_THREE.toString()) + .isEqualTo("FilteredAttributes{key1=value1,key2=222,key3=value3}"); + assertThat(FILTERED_ATTRIBUTES_FOUR.toString()) + .isEqualTo("FilteredAttributes{key1=value1,key2=222,key3=value3}"); + assertThat(FILTERED_ATTRIBUTES_EMPTY_SOURCE.toString()).isEqualTo("{}"); + assertThat(FILTERED_ATTRIBUTES_EMPTY.toString()).isEqualTo("{}"); + } + + /** + * Test behavior of attributes with more than the 32 limit of FilteredAttributes.filteredIndices. 
+ */ + @RepeatedTest(10) + void largeAttributes() { + Set> allKeys = new HashSet<>(); + AttributesBuilder allAttributesBuilder = Attributes.builder(); + IntStream.range(0, 100) + .forEach( + i -> { + AttributeKey key = stringKey("key" + i); + allKeys.add(key); + allAttributesBuilder.put(key, "value" + i); + }); + Attributes allAttributes = allAttributesBuilder.build(); + + Attributes empty = FilteredAttributes.create(allAttributes, Collections.emptySet()); + assertThat(empty.size()).isEqualTo(0); + assertThat(empty.isEmpty()).isTrue(); + + Set> oneKey = allKeys.stream().limit(1).collect(Collectors.toSet()); + Attributes one = FilteredAttributes.create(allAttributes, oneKey); + assertThat(one.size()).isEqualTo(1); + assertThat(one.isEmpty()).isFalse(); + allKeys.stream() + .forEach( + key -> { + if (oneKey.contains(key)) { + assertThat(one.get(key)).isNotNull(); + } else { + assertThat(one.get(key)).isNull(); + } + }); + + Set> tenKeys = allKeys.stream().limit(10).collect(Collectors.toSet()); + Attributes ten = FilteredAttributes.create(allAttributes, tenKeys); + assertThat(ten.size()).isEqualTo(10); + assertThat(ten.isEmpty()).isFalse(); + allKeys.stream() + .forEach( + key -> { + if (tenKeys.contains(key)) { + assertThat(ten.get(key)).isNotNull(); + } else { + assertThat(ten.get(key)).isNull(); + } + }); + } + + @Test + void equalsAndHashCode() { + new EqualsTester() + .addEqualityGroup( + FILTERED_ATTRIBUTES_ONE, + FilteredAttributes.create(Attributes.of(KEY1, "value1"), Collections.singleton(KEY1)), + FilteredAttributes.create(Attributes.of(KEY1, "value1"), ImmutableSet.of(KEY1, KEY2)), + FilteredAttributes.create( + Attributes.of(KEY1, "value1", KEY2, "value2"), Collections.singleton(KEY1)), + FilteredAttributes.create( + Attributes.of(KEY1, "value1", KEY2_LONG, 222L), Collections.singleton(KEY1))) + .addEqualityGroup(FILTERED_ATTRIBUTES_TWO) + .addEqualityGroup(FILTERED_ATTRIBUTES_THREE, FILTERED_ATTRIBUTES_FOUR) + .addEqualityGroup(FILTERED_ATTRIBUTES_EMPTY, FILTERED_ATTRIBUTES_EMPTY_SOURCE) + .testEquals(); + } +} diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/ViewRegistryTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/ViewRegistryTest.java index abba1c32133..ee1a61d1d35 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/ViewRegistryTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/view/ViewRegistryTest.java @@ -7,7 +7,6 @@ import static io.opentelemetry.api.common.AttributeKey.stringKey; import static io.opentelemetry.sdk.metrics.internal.view.ViewRegistry.DEFAULT_REGISTERED_VIEW; -import static io.opentelemetry.sdk.metrics.internal.view.ViewRegistry.toGlobPatternPredicate; import static org.assertj.core.api.Assertions.assertThat; import io.github.netmikey.logunit.api.LogCapturer; @@ -19,11 +18,11 @@ import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.InstrumentValueType; import io.opentelemetry.sdk.metrics.View; +import io.opentelemetry.sdk.metrics.export.CardinalityLimitSelector; import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import io.opentelemetry.sdk.metrics.internal.debug.SourceInfo; import io.opentelemetry.sdk.metrics.internal.descriptor.Advice; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; -import io.opentelemetry.sdk.metrics.internal.export.CardinalityLimitSelector; import io.opentelemetry.sdk.metrics.internal.state.MetricStorage; import java.util.Arrays; 
import java.util.Collections; @@ -435,6 +434,7 @@ void defaults() { } @Test + @SuppressLogger(ViewRegistry.class) void findViews_ApplyAdvice() { // use incompatible aggregation for histogram DefaultAggregationSelector aggregationSelector = @@ -544,26 +544,4 @@ void findViews_ApplyAdvice() { INSTRUMENTATION_SCOPE_INFO)) .isEqualTo(Collections.singletonList(DEFAULT_REGISTERED_VIEW)); } - - @Test - void matchesName() { - assertThat(toGlobPatternPredicate("foo").test("foo")).isTrue(); - assertThat(toGlobPatternPredicate("foo").test("Foo")).isTrue(); - assertThat(toGlobPatternPredicate("foo").test("bar")).isFalse(); - assertThat(toGlobPatternPredicate("fo?").test("foo")).isTrue(); - assertThat(toGlobPatternPredicate("fo??").test("fooo")).isTrue(); - assertThat(toGlobPatternPredicate("fo?").test("fob")).isTrue(); - assertThat(toGlobPatternPredicate("fo?").test("fooo")).isFalse(); - assertThat(toGlobPatternPredicate("*").test("foo")).isTrue(); - assertThat(toGlobPatternPredicate("*").test("bar")).isTrue(); - assertThat(toGlobPatternPredicate("*").test("baz")).isTrue(); - assertThat(toGlobPatternPredicate("*").test("foo.bar.baz")).isTrue(); - assertThat(toGlobPatternPredicate("fo*").test("fo")).isTrue(); - assertThat(toGlobPatternPredicate("fo*").test("foo")).isTrue(); - assertThat(toGlobPatternPredicate("fo*").test("fooo")).isTrue(); - assertThat(toGlobPatternPredicate("fo*").test("foo.bar.baz")).isTrue(); - assertThat(toGlobPatternPredicate("f()[]$^.{}|").test("f()[]$^.{}|")).isTrue(); - assertThat(toGlobPatternPredicate("f()[]$^.{}|?").test("f()[]$^.{}|o")).isTrue(); - assertThat(toGlobPatternPredicate("f()[]$^.{}|*").test("f()[]$^.{}|ooo")).isTrue(); - } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AttributesAdviceTest.java b/sdk/metrics/src/testIncubating/java/io/opentelemetry/sdk/metrics/AttributesAdviceTest.java similarity index 71% rename from sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AttributesAdviceTest.java rename to sdk/metrics/src/testIncubating/java/io/opentelemetry/sdk/metrics/AttributesAdviceTest.java index efc1aff53f5..f30a9204878 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/AttributesAdviceTest.java +++ b/sdk/metrics/src/testIncubating/java/io/opentelemetry/sdk/metrics/AttributesAdviceTest.java @@ -11,11 +11,20 @@ import static java.util.Arrays.asList; import static org.junit.jupiter.params.provider.Arguments.arguments; -import com.google.common.util.concurrent.AtomicDouble; +import io.opentelemetry.api.baggage.Baggage; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleCounterBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleGaugeBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleHistogramBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleUpDownCounterBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedLongCounterBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedLongGaugeBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedLongHistogramBuilder; +import io.opentelemetry.api.incubator.metrics.ExtendedLongUpDownCounterBuilder; import io.opentelemetry.api.metrics.DoubleCounter; import io.opentelemetry.api.metrics.DoubleCounterBuilder; +import io.opentelemetry.api.metrics.DoubleGauge; import io.opentelemetry.api.metrics.DoubleGaugeBuilder; import io.opentelemetry.api.metrics.DoubleHistogram; import 
io.opentelemetry.api.metrics.DoubleHistogramBuilder; @@ -23,19 +32,14 @@ import io.opentelemetry.api.metrics.DoubleUpDownCounterBuilder; import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.LongCounterBuilder; +import io.opentelemetry.api.metrics.LongGauge; import io.opentelemetry.api.metrics.LongGaugeBuilder; import io.opentelemetry.api.metrics.LongHistogram; import io.opentelemetry.api.metrics.LongHistogramBuilder; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleCounterBuilder; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleGaugeBuilder; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleHistogramBuilder; -import io.opentelemetry.extension.incubator.metrics.ExtendedDoubleUpDownCounterBuilder; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongCounterBuilder; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongGaugeBuilder; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongHistogramBuilder; -import io.opentelemetry.extension.incubator.metrics.ExtendedLongUpDownCounterBuilder; +import io.opentelemetry.context.Scope; +import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; import io.opentelemetry.sdk.testing.assertj.AbstractPointAssert; import io.opentelemetry.sdk.testing.assertj.DoublePointAssert; import io.opentelemetry.sdk.testing.assertj.HistogramPointAssert; @@ -43,8 +47,6 @@ import io.opentelemetry.sdk.testing.assertj.MetricAssert; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import java.util.List; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.Stream; import javax.annotation.Nullable; @@ -140,6 +142,81 @@ void instrumentWithAdviceAndViews( equalTo(stringKey("key2"), "2"), equalTo(stringKey("key3"), "3")))); } + @ParameterizedTest + @ArgumentsSource(InstrumentsProvider.class) + void instrumentWithAdviceAndDescriptionViews( + InstrumentFactory instrumentFactory, PointsAssert> pointsAssert) { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + // Register a view which sets a description. Since any matching view supersedes any instrument + // advice, the attribute advice is ignored and all attributes are recorded. + meterProvider = + SdkMeterProvider.builder() + .registerMetricReader(reader) + .registerView( + InstrumentSelector.builder().setName("test").build(), + View.builder().setDescription("description").build()) + .build(); + + Instrument instrument = + instrumentFactory.create( + meterProvider, "test", asList(stringKey("key1"), stringKey("key2"))); + instrument.record(1, ATTRIBUTES); + + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> + pointsAssert.hasPointSatisfying( + assertThat(metric), + point -> + point.hasAttributesSatisfyingExactly( + equalTo(stringKey("key1"), "1"), + equalTo(stringKey("key2"), "2"), + equalTo(stringKey("key3"), "3")))); + } + + @ParameterizedTest + @ArgumentsSource(InstrumentsProvider.class) + void instrumentWithAdviceAndBaggage( + InstrumentFactory instrumentFactory, PointsAssert> pointsAssert) { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProviderBuilder builder = SdkMeterProvider.builder(); + // Register a view which appends a baggage entry. 
Since any matching view supersedes any + // instrument advice, the attribute advice is ignored and all attributes + the baggage entry are + // recorded. + ViewBuilder viewBuilder = View.builder(); + SdkMeterProviderUtil.appendFilteredBaggageAttributes( + viewBuilder, name -> name.equals("baggage1")); + meterProvider = + builder + .registerMetricReader(reader) + .registerView(InstrumentSelector.builder().setName("*").build(), viewBuilder.build()) + .build(); + + Instrument instrument = + instrumentFactory.create( + meterProvider, "test", asList(stringKey("key1"), stringKey("key2"))); + try (Scope unused = + Baggage.current().toBuilder() + .put("baggage1", "value1") + .put("baggage2", "value2") + .build() + .makeCurrent()) { + instrument.record(1, ATTRIBUTES); + } + + assertThat(reader.collectAllMetrics()) + .satisfiesExactly( + metric -> + pointsAssert.hasPointSatisfying( + assertThat(metric), + point -> + point.hasAttributesSatisfyingExactly( + equalTo(stringKey("key1"), "1"), + equalTo(stringKey("key2"), "2"), + equalTo(stringKey("key3"), "3"), + equalTo(stringKey("baggage1"), "value1")))); + } + static final class InstrumentsProvider implements ArgumentsProvider { @Override @@ -153,7 +230,7 @@ public Stream provideArguments(ExtensionContext context) { meterProvider.get("meter").counterBuilder(name).ofDoubles(); if (attributesAdvice != null) { ((ExtendedDoubleCounterBuilder) doubleCounterBuilder) - .setAdvice(advice -> advice.setAttributes(attributesAdvice)); + .setAttributesAdvice(attributesAdvice); } DoubleCounter counter = doubleCounterBuilder.build(); return counter::add; @@ -170,7 +247,7 @@ public Stream provideArguments(ExtensionContext context) { meterProvider.get("meter").counterBuilder(name); if (attributesAdvice != null) { ((ExtendedLongCounterBuilder) doubleCounterBuilder) - .setAdvice(advice -> advice.setAttributes(attributesAdvice)); + .setAttributesAdvice(attributesAdvice); } LongCounter counter = doubleCounterBuilder.build(); return counter::add; @@ -187,17 +264,10 @@ public Stream provideArguments(ExtensionContext context) { meterProvider.get("meter").gaugeBuilder(name); if (attributesAdvice != null) { ((ExtendedDoubleGaugeBuilder) doubleGaugeBuilder) - .setAdvice(advice -> advice.setAttributes(attributesAdvice)); + .setAttributesAdvice(attributesAdvice); } - AtomicDouble valueRef = new AtomicDouble(); - AtomicReference attributesRef = new AtomicReference<>(); - doubleGaugeBuilder.buildWithCallback( - measurement -> - measurement.record(valueRef.doubleValue(), attributesRef.get())); - return (value, attributes) -> { - valueRef.set((double) value); - attributesRef.set(attributes); - }; + DoubleGauge gauge = doubleGaugeBuilder.build(); + return gauge::set; }, (PointsAssert) (metricAssert, assertions) -> @@ -207,21 +277,14 @@ public Stream provideArguments(ExtensionContext context) { arguments( (InstrumentFactory) (meterProvider, name, attributesAdvice) -> { - LongGaugeBuilder doubleGaugeBuilder = + LongGaugeBuilder longGaugeBuilder = meterProvider.get("meter").gaugeBuilder(name).ofLongs(); if (attributesAdvice != null) { - ((ExtendedLongGaugeBuilder) doubleGaugeBuilder) - .setAdvice(advice -> advice.setAttributes(attributesAdvice)); + ((ExtendedLongGaugeBuilder) longGaugeBuilder) + .setAttributesAdvice(attributesAdvice); } - AtomicLong valueRef = new AtomicLong(); - AtomicReference attributesRef = new AtomicReference<>(); - doubleGaugeBuilder.buildWithCallback( - measurement -> - measurement.record(valueRef.longValue(), attributesRef.get())); - return (value, attributes) -> 
{ - valueRef.set(value); - attributesRef.set(attributes); - }; + LongGauge gauge = longGaugeBuilder.build(); + return gauge::set; }, (PointsAssert) (metricAssert, assertions) -> @@ -235,7 +298,7 @@ public Stream provideArguments(ExtensionContext context) { meterProvider.get("meter").histogramBuilder(name); if (attributesAdvice != null) { ((ExtendedDoubleHistogramBuilder) doubleHistogramBuilder) - .setAdvice(advice -> advice.setAttributes(attributesAdvice)); + .setAttributesAdvice(attributesAdvice); } DoubleHistogram histogram = doubleHistogramBuilder.build(); return histogram::record; @@ -252,7 +315,7 @@ public Stream provideArguments(ExtensionContext context) { meterProvider.get("meter").histogramBuilder(name).ofLongs(); if (attributesAdvice != null) { ((ExtendedLongHistogramBuilder) doubleHistogramBuilder) - .setAdvice(advice -> advice.setAttributes(attributesAdvice)); + .setAttributesAdvice(attributesAdvice); } LongHistogram histogram = doubleHistogramBuilder.build(); return histogram::record; @@ -269,7 +332,7 @@ public Stream provideArguments(ExtensionContext context) { meterProvider.get("meter").upDownCounterBuilder(name).ofDoubles(); if (attributesAdvice != null) { ((ExtendedDoubleUpDownCounterBuilder) doubleUpDownCounterBuilder) - .setAdvice(advice -> advice.setAttributes(attributesAdvice)); + .setAttributesAdvice(attributesAdvice); } DoubleUpDownCounter upDownCounter = doubleUpDownCounterBuilder.build(); return upDownCounter::add; @@ -286,7 +349,7 @@ public Stream provideArguments(ExtensionContext context) { meterProvider.get("meter").upDownCounterBuilder(name); if (attributesAdvice != null) { ((ExtendedLongUpDownCounterBuilder) doubleUpDownCounterBuilder) - .setAdvice(advice -> advice.setAttributes(attributesAdvice)); + .setAttributesAdvice(attributesAdvice); } LongUpDownCounter upDownCounter = doubleUpDownCounterBuilder.build(); return upDownCounter::add; diff --git a/sdk/metrics/src/testIncubating/java/io/opentelemetry/sdk/metrics/MeterConfigTest.java b/sdk/metrics/src/testIncubating/java/io/opentelemetry/sdk/metrics/MeterConfigTest.java new file mode 100644 index 00000000000..aaaeaf5f85d --- /dev/null +++ b/sdk/metrics/src/testIncubating/java/io/opentelemetry/sdk/metrics/MeterConfigTest.java @@ -0,0 +1,243 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.metrics; + +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameEquals; +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameMatchesGlob; +import static io.opentelemetry.sdk.metrics.internal.MeterConfig.defaultConfig; +import static io.opentelemetry.sdk.metrics.internal.MeterConfig.disabled; +import static io.opentelemetry.sdk.metrics.internal.MeterConfig.enabled; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static java.util.stream.Collectors.groupingBy; + +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleGauge; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleHistogram; +import io.opentelemetry.api.incubator.metrics.ExtendedDoubleUpDownCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedLongCounter; +import io.opentelemetry.api.incubator.metrics.ExtendedLongGauge; +import io.opentelemetry.api.incubator.metrics.ExtendedLongHistogram; +import io.opentelemetry.api.incubator.metrics.ExtendedLongUpDownCounter; +import io.opentelemetry.api.metrics.Meter; +import 
io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.internal.MeterConfig; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Stream; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class MeterConfigTest { + + @Test + void disableScopes() { + InMemoryMetricReader reader = InMemoryMetricReader.create(); + SdkMeterProvider meterProvider = + SdkMeterProvider.builder() + // Disable meterB. Since meters are enabled by default, meterA and meterC are enabled. + .addMeterConfiguratorCondition(nameEquals("meterB"), disabled()) + .registerMetricReader(reader) + // Register drop aggregation for all instruments of meterD. Instruments are disabled if + // their relevant MeterConfig is disabled, or if there are no resolved views which + // consume the measurements. + .registerView( + InstrumentSelector.builder().setMeterName("meterD").build(), + View.builder().setAggregation(Aggregation.drop()).build()) + .build(); + + Meter meterA = meterProvider.get("meterA"); + Meter meterB = meterProvider.get("meterB"); + Meter meterC = meterProvider.get("meterC"); + Meter meterD = meterProvider.get("meterD"); + AtomicLong meterAInvocations = new AtomicLong(); + AtomicLong meterBInvocations = new AtomicLong(); + AtomicLong meterCInvocations = new AtomicLong(); + AtomicLong meterDInvocations = new AtomicLong(); + + // Record measurements to each instrument type + recordToMeterInstruments(meterA, meterAInvocations); + recordToMeterInstruments(meterB, meterBInvocations); + recordToMeterInstruments(meterC, meterCInvocations); + recordToMeterInstruments(meterD, meterDInvocations); + + // Only metrics from meterA and meterC should be seen + assertThat(reader.collectAllMetrics()) + .satisfies( + metrics -> { + Map> metricsByScope = + metrics.stream().collect(groupingBy(MetricData::getInstrumentationScopeInfo)); + assertThat(metricsByScope.get(InstrumentationScopeInfo.create("meterA"))).hasSize(14); + assertThat(metricsByScope.get(InstrumentationScopeInfo.create("meterB"))).isNull(); + assertThat(metricsByScope.get(InstrumentationScopeInfo.create("meterC"))).hasSize(14); + assertThat(metricsByScope.get(InstrumentationScopeInfo.create("meterD"))).isNull(); + }); + // Only async callbacks from meterA and meterC should be invoked + assertThat(meterAInvocations.get()).isPositive(); + assertThat(meterBInvocations.get()).isZero(); + assertThat(meterCInvocations.get()).isPositive(); + assertThat(meterDInvocations.get()).isZero(); + // Instruments from meterA and meterC are enabled, meterC is not enabled + assertMeterInstrumentsEnabled(meterA, /* expectedEnabled= */ true); + assertMeterInstrumentsEnabled(meterB, /* expectedEnabled= */ false); + assertMeterInstrumentsEnabled(meterC, /* expectedEnabled= */ true); + assertMeterInstrumentsEnabled(meterD, /* expectedEnabled= */ false); + } + + private static void recordToMeterInstruments(Meter meter, AtomicLong asyncInvocationsCount) { + meter.counterBuilder("longCounter").build().add(1); + meter.counterBuilder("doubleCounter").ofDoubles().build().add(1); + meter + .counterBuilder("asyncLongCounter") + .buildWithCallback( + observable -> { + 
asyncInvocationsCount.incrementAndGet(); + observable.record(1); + }); + meter + .counterBuilder("asyncDoubleCounter") + .ofDoubles() + .buildWithCallback( + observable -> { + asyncInvocationsCount.incrementAndGet(); + observable.record(1); + }); + meter.upDownCounterBuilder("longUpDownCounter").build().add(1); + meter.upDownCounterBuilder("doubleUpDownCounter").ofDoubles().build().add(1); + meter + .upDownCounterBuilder("asyncLongUpDownCounter") + .buildWithCallback( + observable -> { + asyncInvocationsCount.incrementAndGet(); + observable.record(1); + }); + meter + .upDownCounterBuilder("asyncDoubleUpDownCounter") + .ofDoubles() + .buildWithCallback( + observable -> { + asyncInvocationsCount.incrementAndGet(); + observable.record(1); + }); + meter.histogramBuilder("doubleHistogram").build().record(1.0); + meter.histogramBuilder("longHistogram").ofLongs().build().record(1); + meter.gaugeBuilder("doubleGauge").build().set(1); + meter.gaugeBuilder("longGauge").ofLongs().build().set(1); + meter + .gaugeBuilder("asyncDoubleGauge") + .buildWithCallback( + observable -> { + asyncInvocationsCount.incrementAndGet(); + observable.record(1.0); + }); + meter + .gaugeBuilder("asyncLongGauge") + .ofLongs() + .buildWithCallback( + observable -> { + asyncInvocationsCount.incrementAndGet(); + observable.record(1); + }); + } + + private static void assertMeterInstrumentsEnabled(Meter meter, boolean expectedEnabled) { + assertThat( + ((ExtendedDoubleCounter) meter.counterBuilder("doubleCounter").ofDoubles().build()) + .isEnabled()) + .isEqualTo(expectedEnabled); + assertThat(((ExtendedLongCounter) meter.counterBuilder("longCounter").build()).isEnabled()) + .isEqualTo(expectedEnabled); + assertThat( + ((ExtendedDoubleUpDownCounter) + meter.upDownCounterBuilder("doubleUpDownCounter").ofDoubles().build()) + .isEnabled()) + .isEqualTo(expectedEnabled); + assertThat( + ((ExtendedLongUpDownCounter) meter.upDownCounterBuilder("longUpDownCounter").build()) + .isEnabled()) + .isEqualTo(expectedEnabled); + assertThat( + ((ExtendedDoubleHistogram) meter.histogramBuilder("doubleHistogram").build()) + .isEnabled()) + .isEqualTo(expectedEnabled); + assertThat( + ((ExtendedLongHistogram) meter.histogramBuilder("longHistogram").ofLongs().build()) + .isEnabled()) + .isEqualTo(expectedEnabled); + assertThat(((ExtendedDoubleGauge) meter.gaugeBuilder("doubleGauge").build()).isEnabled()) + .isEqualTo(expectedEnabled); + assertThat(((ExtendedLongGauge) meter.gaugeBuilder("longGauge").ofLongs().build()).isEnabled()) + .isEqualTo(expectedEnabled); + } + + @ParameterizedTest + @MethodSource("meterConfiguratorArgs") + void meterConfigurator( + ScopeConfigurator meterConfigurator, + InstrumentationScopeInfo scope, + MeterConfig expectedMeterConfig) { + MeterConfig meterConfig = meterConfigurator.apply(scope); + meterConfig = meterConfig == null ? 
defaultConfig() : meterConfig; + assertThat(meterConfig).isEqualTo(expectedMeterConfig); + } + + private static final InstrumentationScopeInfo scopeCat = InstrumentationScopeInfo.create("cat"); + private static final InstrumentationScopeInfo scopeDog = InstrumentationScopeInfo.create("dog"); + private static final InstrumentationScopeInfo scopeDuck = InstrumentationScopeInfo.create("duck"); + + private static Stream meterConfiguratorArgs() { + ScopeConfigurator defaultConfigurator = MeterConfig.configuratorBuilder().build(); + ScopeConfigurator disableCat = + MeterConfig.configuratorBuilder() + .addCondition(nameEquals("cat"), MeterConfig.disabled()) + // Second matching rule for cat should be ignored + .addCondition(nameEquals("cat"), enabled()) + .build(); + ScopeConfigurator disableStartsWithD = + MeterConfig.configuratorBuilder() + .addCondition(nameMatchesGlob("d*"), MeterConfig.disabled()) + .build(); + ScopeConfigurator enableCat = + MeterConfig.configuratorBuilder() + .setDefault(MeterConfig.disabled()) + .addCondition(nameEquals("cat"), enabled()) + // Second matching rule for cat should be ignored + .addCondition(nameEquals("cat"), MeterConfig.disabled()) + .build(); + ScopeConfigurator enableStartsWithD = + MeterConfig.configuratorBuilder() + .setDefault(MeterConfig.disabled()) + .addCondition(nameMatchesGlob("d*"), MeterConfig.enabled()) + .build(); + + return Stream.of( + // default + Arguments.of(defaultConfigurator, scopeCat, defaultConfig()), + Arguments.of(defaultConfigurator, scopeDog, defaultConfig()), + Arguments.of(defaultConfigurator, scopeDuck, defaultConfig()), + // default enabled, disable cat + Arguments.of(disableCat, scopeCat, MeterConfig.disabled()), + Arguments.of(disableCat, scopeDog, enabled()), + Arguments.of(disableCat, scopeDuck, enabled()), + // default enabled, disable pattern + Arguments.of(disableStartsWithD, scopeCat, enabled()), + Arguments.of(disableStartsWithD, scopeDog, MeterConfig.disabled()), + Arguments.of(disableStartsWithD, scopeDuck, MeterConfig.disabled()), + // default disabled, enable cat + Arguments.of(enableCat, scopeCat, enabled()), + Arguments.of(enableCat, scopeDog, MeterConfig.disabled()), + Arguments.of(enableCat, scopeDuck, MeterConfig.disabled()), + // default disabled, enable pattern + Arguments.of(enableStartsWithD, scopeCat, MeterConfig.disabled()), + Arguments.of(enableStartsWithD, scopeDog, enabled()), + Arguments.of(enableStartsWithD, scopeDuck, enabled())); + } +} diff --git a/sdk/testing/build.gradle.kts b/sdk/testing/build.gradle.kts index a9f2534aafd..e6a095179d5 100644 --- a/sdk/testing/build.gradle.kts +++ b/sdk/testing/build.gradle.kts @@ -16,6 +16,8 @@ dependencies { annotationProcessor("com.google.auto.value:auto-value") + testImplementation(project(":api:incubator")) + testImplementation("junit:junit") testImplementation("org.junit.vintage:junit-vintage-engine") } diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/AbstractPointAssert.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/AbstractPointAssert.java index cf7454c0405..41da09a10e5 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/AbstractPointAssert.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/AbstractPointAssert.java @@ -14,6 +14,7 @@ import java.util.Arrays; import java.util.Map; import java.util.Set; +import java.util.function.Consumer; import javax.annotation.Nullable; import org.assertj.core.api.AbstractAssert; import org.assertj.core.api.Assertions; @@ 
-111,6 +112,17 @@ public final PointAssertT hasAttributesSatisfying(Iterable a return myself; } + /** + * Asserts the point has attributes satisfying the given condition. + * + * @since 1.33.0 + */ + public final PointAssertT hasAttributesSatisfying(Consumer attributes) { + isNotNull(); + assertThat(actual.getAttributes()).as("attributes").satisfies(attributes); + return myself; + } + /** * Asserts the point has attributes matching all {@code assertions} and no more. Assertions can be * created using methods like {@link OpenTelemetryAssertions#satisfies(AttributeKey, diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LogRecordDataAssert.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LogRecordDataAssert.java index 7489cb135b5..323e5b7813d 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LogRecordDataAssert.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LogRecordDataAssert.java @@ -5,18 +5,27 @@ package io.opentelemetry.sdk.testing.assertj; +import static io.opentelemetry.api.common.ValueType.KEY_VALUE_LIST; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static java.util.stream.Collectors.toList; +import static org.junit.Assert.assertNotNull; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.common.ValueType; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.logs.data.LogRecordData; import io.opentelemetry.sdk.resources.Resource; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Consumer; import javax.annotation.Nullable; import org.assertj.core.api.AbstractAssert; @@ -54,8 +63,7 @@ public LogRecordDataAssert hasResource(Resource resource) { public LogRecordDataAssert hasResourceSatisfying(Consumer resource) { isNotNull(); resource.accept( - new ResourceAssert( - actual.getResource(), String.format("log [%s]", actual.getBody().asString()))); + new ResourceAssert(actual.getResource(), String.format("log [%s]", actual.getBodyValue()))); return this; } @@ -76,6 +84,8 @@ public LogRecordDataAssert hasInstrumentationScope( return this; } + // TODO (trask) once event name stabilizes, add hasEventName(String) + /** Asserts the log has the given epoch {@code timestamp}. */ public LogRecordDataAssert hasTimestamp(long timestampEpochNanos) { isNotNull(); @@ -149,13 +159,175 @@ public LogRecordDataAssert hasSeverityText(String severityText) { /** Asserts the log has the given body. */ public LogRecordDataAssert hasBody(String body) { isNotNull(); - if (!actual.getBody().asString().equals(body)) { + return hasBody(Value.of(body)); + } + + /** + * Asserts the log has the given body. 
+ * + * @since 1.42.0 + */ + public LogRecordDataAssert hasBody(@Nullable Value body) { + isNotNull(); + if (!Objects.equals(actual.getBodyValue(), body)) { failWithActualExpectedAndMessage( - actual.getBody(), + actual.getBodyValue(), body, "Expected log to have body <%s> but was <%s>", body, - actual.getBody().asString()); + actual.getBodyValue()); + } + return this; + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given {@code key} and String {@code value}. + * + * @since 1.42.0 + */ + public LogRecordDataAssert hasBodyField(String key, String value) { + return hasBodyField(key, Value.of(value)); + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given {@code key} and long {@code value}. + * + * @since 1.42.0 + */ + public LogRecordDataAssert hasBodyField(String key, long value) { + return hasBodyField(key, Value.of(value)); + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given {@code key} and double {@code value}. + * + * @since 1.42.0 + */ + public LogRecordDataAssert hasBodyField(String key, double value) { + return hasBodyField(key, Value.of(value)); + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given {@code key} and boolean {@code value}. + * + * @since 1.42.0 + */ + public LogRecordDataAssert hasBodyField(String key, boolean value) { + return hasBodyField(key, Value.of(value)); + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given {@code key} and list of String {@code value}s. + * + * @since 1.42.0 + */ + public LogRecordDataAssert hasBodyField(String key, String... value) { + List> values = new ArrayList<>(value.length); + for (String val : value) { + values.add(Value.of(val)); + } + return hasBodyField(key, Value.of(values)); + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given {@code key} and list of long {@code value}s. + * + * @since 1.42.0 + */ + public LogRecordDataAssert hasBodyField(String key, long... value) { + List> values = new ArrayList<>(value.length); + for (long val : value) { + values.add(Value.of(val)); + } + return hasBodyField(key, Value.of(values)); + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given {@code key} and list of double {@code value}s. + * + * @since 1.42.0 + */ + public LogRecordDataAssert hasBodyField(String key, double... value) { + List> values = new ArrayList<>(value.length); + for (double val : value) { + values.add(Value.of(val)); + } + return hasBodyField(key, Value.of(values)); + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given {@code key} and list of boolean {@code value}s. + * + * @since 1.42.0 + */ + public LogRecordDataAssert hasBodyField(String key, boolean... value) { + List> values = new ArrayList<>(value.length); + for (boolean val : value) { + values.add(Value.of(val)); + } + return hasBodyField(key, Value.of(values)); + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given {@code key} and {@code value}. 
+ * + * @since 1.42.0 + */ + @SuppressWarnings({"unchecked"}) + public LogRecordDataAssert hasBodyField(String key, Value value) { + isNotNull(); + Value bodyValue = actual.getBodyValue(); + assertNotNull( + "Body was not expected to be null.", bodyValue); // Can't use assertj or nullaway complains + assertThat(bodyValue.getType()).isEqualTo(KEY_VALUE_LIST); + Value> body = (Value>) bodyValue; + List payload = body.getValue(); + KeyValue expected = KeyValue.of(key, value); + assertThat(payload).contains(expected); + return this; + } + + /** + * Asserts the log has a body of type {@link ValueType#KEY_VALUE_LIST}, containing a field with + * the given attribute {@code key} and {@code value}. + * + * @since 1.42.0 + */ + @SuppressWarnings({"unchecked"}) + public LogRecordDataAssert hasBodyField(AttributeKey key, T value) { + switch (key.getType()) { + case STRING: + return hasBodyField(key.getKey(), (String) value); + case BOOLEAN: + return hasBodyField(key.getKey(), (boolean) value); + case LONG: + return hasBodyField(key.getKey(), (long) value); + case DOUBLE: + return hasBodyField(key.getKey(), (double) value); + case STRING_ARRAY: + return hasBodyField( + key.getKey(), + Value.of(((List) value).stream().map(Value::of).collect(toList()))); + case BOOLEAN_ARRAY: + return hasBodyField( + key.getKey(), + Value.of(((List) value).stream().map(Value::of).collect(toList()))); + case LONG_ARRAY: + return hasBodyField( + key.getKey(), Value.of(((List) value).stream().map(Value::of).collect(toList()))); + case DOUBLE_ARRAY: + return hasBodyField( + key.getKey(), + Value.of(((List) value).stream().map(Value::of).collect(toList()))); } return this; } diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LongSumAssert.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LongSumAssert.java index 16ec2109e9a..128a95ebce8 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LongSumAssert.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LongSumAssert.java @@ -73,14 +73,14 @@ public LongSumAssert isDelta() { return myself; } - /** Asserts the sum has points matching all of the given assertions and no more, in any order. */ + /** Asserts the sum has points matching all the given assertions and no more, in any order. */ @SafeVarargs @SuppressWarnings("varargs") public final LongSumAssert hasPointsSatisfying(Consumer... assertions) { return hasPointsSatisfying(Arrays.asList(assertions)); } - /** Asserts the sum has points matching all of the given assertions and no more, in any order. */ + /** Asserts the sum has points matching all the given assertions and no more, in any order. 
*/ public LongSumAssert hasPointsSatisfying( Iterable> assertions) { assertThat(actual.getPoints()) diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/TraceAssert.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/TraceAssert.java index 5f3da9db7f4..ee19607e2cd 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/TraceAssert.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/TraceAssert.java @@ -59,9 +59,10 @@ public TraceAssert hasSpansSatisfyingExactly( List> assertionsList = StreamSupport.stream(assertions.spliterator(), false).collect(Collectors.toList()); hasSize(assertionsList.size()); + // Avoid zipSatisfy - https://github.com/assertj/assertj-core/issues/2300 for (int i = 0; i < assertionsList.size(); i++) { - assertionsList.get(i).accept(new SpanDataAssert(actual.get(i))); + assertionsList.get(i).accept(new SpanDataAssert(actual.get(i)).describedAs("Span " + i)); } return this; } diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/TracesAssert.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/TracesAssert.java index e120ae7c854..d4c936fe5b2 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/TracesAssert.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/TracesAssert.java @@ -105,7 +105,7 @@ public TracesAssert hasTracesSatisfyingExactly( hasSize(assertionsList.size()); // Avoid zipSatisfy - https://github.com/assertj/assertj-core/issues/2300 for (int i = 0; i < assertionsList.size(); i++) { - assertionsList.get(i).accept(new TraceAssert(actual.get(i))); + assertionsList.get(i).accept(new TraceAssert(actual.get(i)).describedAs("Trace " + i)); } return this; } diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/context/SettableContextStorageProvider.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/context/SettableContextStorageProvider.java index 1a34c3eade4..22b4fc6ec87 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/context/SettableContextStorageProvider.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/context/SettableContextStorageProvider.java @@ -34,6 +34,7 @@ public static ContextStorage getContextStorage() { private enum SettableContextStorage implements ContextStorage { INSTANCE; + @SuppressWarnings("NonFinalStaticField") private static volatile ContextStorage delegate = createStorage(); @Override diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/exporter/InMemoryMetricReader.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/exporter/InMemoryMetricReader.java index 7957035b2ed..3b79affe346 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/exporter/InMemoryMetricReader.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/exporter/InMemoryMetricReader.java @@ -5,7 +5,10 @@ package io.opentelemetry.sdk.testing.exporter; +import static io.opentelemetry.sdk.common.export.MemoryMode.IMMUTABLE_DATA; + import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.export.MemoryMode; import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; @@ -14,7 +17,6 @@ import io.opentelemetry.sdk.metrics.export.CollectionRegistration; import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; import 
io.opentelemetry.sdk.metrics.export.MetricReader; -import io.opentelemetry.sdk.metrics.internal.export.MetricProducer; import java.util.Collection; import java.util.Collections; import java.util.concurrent.atomic.AtomicBoolean; @@ -51,7 +53,19 @@ public class InMemoryMetricReader implements MetricReader { private final AggregationTemporalitySelector aggregationTemporalitySelector; private final DefaultAggregationSelector defaultAggregationSelector; private final AtomicBoolean isShutdown = new AtomicBoolean(false); - private volatile MetricProducer metricProducer = MetricProducer.noop(); + private volatile CollectionRegistration collectionRegistration = CollectionRegistration.noop(); + private final MemoryMode memoryMode; + + /** + * Creates an {@link InMemoryMetricReaderBuilder} with defaults. + * + * @return a builder with always-cumulative {@link AggregationTemporalitySelector}, default {@link + * DefaultAggregationSelector} and {@link MemoryMode#IMMUTABLE_DATA} {@link MemoryMode} + * @since 1.31.0 + */ + public static InMemoryMetricReaderBuilder builder() { + return new InMemoryMetricReaderBuilder(); + } /** Returns a new {@link InMemoryMetricReader}. */ public static InMemoryMetricReader create() { @@ -79,8 +93,16 @@ public static InMemoryMetricReader createDelta() { private InMemoryMetricReader( AggregationTemporalitySelector aggregationTemporalitySelector, DefaultAggregationSelector defaultAggregationSelector) { + this(aggregationTemporalitySelector, defaultAggregationSelector, IMMUTABLE_DATA); + } + + InMemoryMetricReader( + AggregationTemporalitySelector aggregationTemporalitySelector, + DefaultAggregationSelector defaultAggregationSelector, + MemoryMode memoryMode) { this.aggregationTemporalitySelector = aggregationTemporalitySelector; this.defaultAggregationSelector = defaultAggregationSelector; + this.memoryMode = memoryMode; } /** Returns all metrics accumulated since the last call. 
*/ @@ -88,12 +110,12 @@ public Collection collectAllMetrics() { if (isShutdown.get()) { return Collections.emptyList(); } - return metricProducer.collectAllMetrics(); + return collectionRegistration.collectAllMetrics(); } @Override - public void register(CollectionRegistration registration) { - this.metricProducer = MetricProducer.asMetricProducer(registration); + public void register(CollectionRegistration collectionRegistration) { + this.collectionRegistration = collectionRegistration; } @Override @@ -118,6 +140,11 @@ public CompletableResultCode shutdown() { return CompletableResultCode.ofSuccess(); } + @Override + public MemoryMode getMemoryMode() { + return memoryMode; + } + @Override public String toString() { return "InMemoryMetricReader{}"; diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/exporter/InMemoryMetricReaderBuilder.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/exporter/InMemoryMetricReaderBuilder.java new file mode 100644 index 00000000000..8a0c934bb2d --- /dev/null +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/exporter/InMemoryMetricReaderBuilder.java @@ -0,0 +1,79 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.testing.exporter; + +import static io.opentelemetry.sdk.common.export.MemoryMode.IMMUTABLE_DATA; + +import io.opentelemetry.sdk.common.export.MemoryMode; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; +import io.opentelemetry.sdk.metrics.export.DefaultAggregationSelector; +import io.opentelemetry.sdk.metrics.export.MetricExporter; + +/** + * Builder for {@link InMemoryMetricReader}. + * + * @since 1.31.0 + */ +public final class InMemoryMetricReaderBuilder { + private AggregationTemporalitySelector aggregationTemporalitySelector = + AggregationTemporalitySelector.alwaysCumulative(); + private DefaultAggregationSelector defaultAggregationSelector = + DefaultAggregationSelector.getDefault(); + private MemoryMode memoryMode = IMMUTABLE_DATA; + + /** + * Creates an {@link InMemoryMetricReaderBuilder} with defaults. + * + *

    Creates a builder with always-cumulative {@link AggregationTemporalitySelector}, default + * {@link DefaultAggregationSelector} and {@link MemoryMode#IMMUTABLE_DATA} {@link MemoryMode} + */ + InMemoryMetricReaderBuilder() {} + + /** + * Sets the {@link AggregationTemporalitySelector} used by {@link + * MetricExporter#getAggregationTemporality(InstrumentType)}. + * + * @param aggregationTemporalitySelector the {@link AggregationTemporalitySelector} to set + * @return this {@link InMemoryMetricReaderBuilder} + */ + public InMemoryMetricReaderBuilder setAggregationTemporalitySelector( + AggregationTemporalitySelector aggregationTemporalitySelector) { + this.aggregationTemporalitySelector = aggregationTemporalitySelector; + return this; + } + + /** + * Sets the {@link DefaultAggregationSelector} used by {@link + * MetricExporter#getDefaultAggregation(InstrumentType)}. + * + * @param defaultAggregationSelector the {@link DefaultAggregationSelector} to set + * @return this {@link InMemoryMetricReaderBuilder} + */ + @SuppressWarnings("unused") + public InMemoryMetricReaderBuilder setDefaultAggregationSelector( + DefaultAggregationSelector defaultAggregationSelector) { + this.defaultAggregationSelector = defaultAggregationSelector; + return this; + } + + /** + * Sets the {@link MemoryMode}. + * + * @param memoryMode the {@link MemoryMode} to set + * @return this {@link InMemoryMetricReaderBuilder} + */ + public InMemoryMetricReaderBuilder setMemoryMode(MemoryMode memoryMode) { + this.memoryMode = memoryMode; + return this; + } + + /** Constructs a {@link InMemoryMetricReader} based on the builder's values. */ + public InMemoryMetricReader build() { + return new InMemoryMetricReader( + aggregationTemporalitySelector, defaultAggregationSelector, memoryMode); + } +} diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/junit4/OpenTelemetryRule.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/junit4/OpenTelemetryRule.java index b0b15782a12..0fb0a7fd181 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/junit4/OpenTelemetryRule.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/junit4/OpenTelemetryRule.java @@ -7,13 +7,19 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.context.propagation.TextMapPropagator; import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; import io.opentelemetry.sdk.trace.SdkTracerProvider; @@ -71,27 +77,42 @@ public static OpenTelemetryRule create() { SdkMeterProvider meterProvider = SdkMeterProvider.builder().registerMetricReader(metricReader).build(); + InMemoryLogRecordExporter logRecordExporter = InMemoryLogRecordExporter.create(); + + 
SdkLoggerProvider loggerProvider = + SdkLoggerProvider.builder() + .addLogRecordProcessor(SimpleLogRecordProcessor.create(logRecordExporter)) + .build(); + OpenTelemetrySdk openTelemetry = OpenTelemetrySdk.builder() - .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())) + .setPropagators( + ContextPropagators.create( + TextMapPropagator.composite( + W3CTraceContextPropagator.getInstance(), + W3CBaggagePropagator.getInstance()))) .setTracerProvider(tracerProvider) .setMeterProvider(meterProvider) + .setLoggerProvider(loggerProvider) .build(); - return new OpenTelemetryRule(openTelemetry, spanExporter, metricReader); + return new OpenTelemetryRule(openTelemetry, spanExporter, metricReader, logRecordExporter); } private final OpenTelemetrySdk openTelemetry; private final InMemorySpanExporter spanExporter; private final InMemoryMetricReader metricReader; + private final InMemoryLogRecordExporter logRecordExporter; private OpenTelemetryRule( OpenTelemetrySdk openTelemetry, InMemorySpanExporter spanExporter, - InMemoryMetricReader metricReader) { + InMemoryMetricReader metricReader, + InMemoryLogRecordExporter logRecordExporter) { this.openTelemetry = openTelemetry; this.spanExporter = spanExporter; this.metricReader = metricReader; + this.logRecordExporter = logRecordExporter; } /** Returns the {@link OpenTelemetrySdk} created by this extension. */ @@ -113,6 +134,15 @@ public List getMetrics() { return new ArrayList<>(metricReader.collectAllMetrics()); } + /** + * Returns all the exported {@link LogRecordData} so far. + * + * @since 1.32.0 + */ + public List getLogRecords() { + return new ArrayList<>(logRecordExporter.getFinishedLogRecordItems()); + } + /** * Clears the collected exported {@link SpanData}. Consider making your test smaller instead of * manually clearing state using this method. @@ -130,12 +160,23 @@ public void clearMetrics() { SdkMeterProviderUtil.resetForTest(openTelemetry.getSdkMeterProvider()); } + /** + * Clears the collected exported {@link LogRecordData}. Consider making your test smaller instead + * of manually clearing state using this method. 
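(Aside, not part of the diff: a sketch of how a JUnit 4 test exercises the log-record support wired up above; it mirrors the OpenTelemetryRuleTest changes further down and assumes the rule is registered as otelTesting.)

    Logger logger = otelTesting.getOpenTelemetry().getLogsBridge().get("test");
    logger.logRecordBuilder().setBody("body").emit();
    assertThat(otelTesting.getLogRecords()).hasSize(1);
    otelTesting.clearLogRecords(); // normally unnecessary: before() clears logs between tests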
+ * + * @since 1.32.0 + */ + public void clearLogRecords() { + logRecordExporter.reset(); + } + @Override protected void before() { GlobalOpenTelemetry.resetForTest(); GlobalOpenTelemetry.set(openTelemetry); clearSpans(); clearMetrics(); + clearLogRecords(); } @Override diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/junit5/OpenTelemetryExtension.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/junit5/OpenTelemetryExtension.java index 35ed9f4a637..5ce9d1dc72a 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/junit5/OpenTelemetryExtension.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/junit5/OpenTelemetryExtension.java @@ -9,14 +9,20 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.context.propagation.TextMapPropagator; import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; import io.opentelemetry.sdk.testing.assertj.TracesAssert; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; import io.opentelemetry.sdk.trace.SdkTracerProvider; @@ -73,27 +79,42 @@ public static OpenTelemetryExtension create() { SdkMeterProvider meterProvider = SdkMeterProvider.builder().registerMetricReader(metricReader).build(); + InMemoryLogRecordExporter logRecordExporter = InMemoryLogRecordExporter.create(); + + SdkLoggerProvider loggerProvider = + SdkLoggerProvider.builder() + .addLogRecordProcessor(SimpleLogRecordProcessor.create(logRecordExporter)) + .build(); + OpenTelemetrySdk openTelemetry = OpenTelemetrySdk.builder() - .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())) + .setPropagators( + ContextPropagators.create( + TextMapPropagator.composite( + W3CTraceContextPropagator.getInstance(), + W3CBaggagePropagator.getInstance()))) .setTracerProvider(tracerProvider) .setMeterProvider(meterProvider) + .setLoggerProvider(loggerProvider) .build(); - return new OpenTelemetryExtension(openTelemetry, spanExporter, metricReader); + return new OpenTelemetryExtension(openTelemetry, spanExporter, metricReader, logRecordExporter); } private final OpenTelemetrySdk openTelemetry; private final InMemorySpanExporter spanExporter; private final InMemoryMetricReader metricReader; + private final InMemoryLogRecordExporter logRecordExporter; private OpenTelemetryExtension( OpenTelemetrySdk openTelemetry, InMemorySpanExporter spanExporter, - InMemoryMetricReader metricReader) { + InMemoryMetricReader metricReader, + InMemoryLogRecordExporter logRecordExporter) { this.openTelemetry = openTelemetry; this.spanExporter = spanExporter; this.metricReader = metricReader; + this.logRecordExporter = logRecordExporter; } /** Returns the {@link OpenTelemetrySdk} created by this extension. 
*/ @@ -115,6 +136,15 @@ public List getMetrics() { return new ArrayList<>(metricReader.collectAllMetrics()); } + /** + * Returns all the exported {@link LogRecordData} so far. + * + * @since 1.32.0 + */ + public List getLogRecords() { + return new ArrayList<>(logRecordExporter.getFinishedLogRecordItems()); + } + /** * Returns a {@link TracesAssert} for asserting on the currently exported traces. This method * requires AssertJ to be on the classpath. @@ -140,10 +170,21 @@ public void clearMetrics() { SdkMeterProviderUtil.resetForTest(openTelemetry.getSdkMeterProvider()); } + /** + * Clears the collected exported {@link LogRecordData}. Consider making your test smaller instead + * of manually clearing state using this method. + * + * @since 1.32.0 + */ + public void clearLogRecords() { + logRecordExporter.reset(); + } + @Override public void beforeEach(ExtensionContext context) { clearSpans(); clearMetrics(); + clearLogRecords(); } @Override diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/logs/TestLogRecordData.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/logs/TestLogRecordData.java index 7de560fac49..ec2e37294f4 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/logs/TestLogRecordData.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/logs/TestLogRecordData.java @@ -7,14 +7,15 @@ import com.google.auto.value.AutoValue; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.logs.data.Body; import io.opentelemetry.sdk.logs.data.LogRecordData; import io.opentelemetry.sdk.resources.Resource; import java.time.Instant; import java.util.concurrent.TimeUnit; +import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; /** @@ -24,6 +25,9 @@ */ @Immutable @AutoValue +@AutoValue.CopyAnnotations +// Carry suppression for Body to AutoValue implementation via @AutoValue.CopyAnnotations +@SuppressWarnings("deprecation") public abstract class TestLogRecordData implements LogRecordData { /** Creates a new Builder for creating an {@link LogRecordData} instance. */ @@ -35,11 +39,27 @@ public static Builder builder() { .setObservedTimestamp(0, TimeUnit.NANOSECONDS) .setSpanContext(SpanContext.getInvalid()) .setSeverity(Severity.UNDEFINED_SEVERITY_NUMBER) - .setBody("") .setAttributes(Attributes.empty()) .setTotalAttributeCount(0); } + @Deprecated + public io.opentelemetry.sdk.logs.data.Body getBody() { + Value valueBody = getBodyValue(); + return valueBody == null + ? io.opentelemetry.sdk.logs.data.Body.empty() + : io.opentelemetry.sdk.logs.data.Body.string(valueBody.asString()); + } + + /** + * {@inheritDoc} + * + * @since 1.42.0 + */ + @Override + @Nullable + public abstract Value getBodyValue(); + TestLogRecordData() {} /** A {@code Builder} class for {@link TestLogRecordData}. */ @@ -123,11 +143,30 @@ public Builder setObservedTimestamp(long timestamp, TimeUnit unit) { /** Set the body string. */ public Builder setBody(String body) { - return setBody(Body.string(body)); + return setBodyValue(Value.of(body)); } - /** Set the body. */ - abstract Builder setBody(Body body); + /** + * Set the body. + * + * @deprecated Use {@link #setBodyValue(Value)}. 
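(Aside, not part of the diff: a short sketch of the migrated body API on TestLogRecordData. setBody(String) now stores a Value, and getBodyValue() replaces the deprecated Body accessor; the build() call is assumed from the existing builder.)

    TestLogRecordData data = TestLogRecordData.builder().setBody("hello").build();
    Value<?> body = data.getBodyValue(); // Value.of("hello"); null when no body was set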
+ */ + @Deprecated + public Builder setBody(io.opentelemetry.sdk.logs.data.Body body) { + if (body.getType() == io.opentelemetry.sdk.logs.data.Body.Type.STRING) { + setBodyValue(Value.of(body.asString())); + } else if (body.getType() == io.opentelemetry.sdk.logs.data.Body.Type.EMPTY) { + setBodyValue(null); + } + return this; + } + + /** + * Set the body. + * + * @since 1.42.0 + */ + public abstract Builder setBodyValue(@Nullable Value body); /** Set the attributes. */ public abstract Builder setAttributes(Attributes attributes); diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/logs/internal/TestExtendedLogRecordData.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/logs/internal/TestExtendedLogRecordData.java new file mode 100644 index 00000000000..750344bf748 --- /dev/null +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/logs/internal/TestExtendedLogRecordData.java @@ -0,0 +1,191 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.testing.logs.internal; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.logs.Severity; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.data.internal.ExtendedLogRecordData; +import io.opentelemetry.sdk.resources.Resource; +import java.time.Instant; +import java.util.concurrent.TimeUnit; +import javax.annotation.Nullable; +import javax.annotation.concurrent.Immutable; + +/** + * Immutable representation of {@link LogRecordData}. + * + *
<p>
    This class is internal and experimental. Its APIs are unstable and can change at any time. Its + * APIs (or a version of them) may be promoted to the public stable API in the future, but no + * guarantees are made. + * + * @since 1.27.0 + */ +// TODO (trask) delete this class once event name stabilizes +@Immutable +@AutoValue +@AutoValue.CopyAnnotations +// Carry suppression for Body to AutoValue implementation via @AutoValue.CopyAnnotations +@SuppressWarnings("deprecation") +public abstract class TestExtendedLogRecordData implements ExtendedLogRecordData { + + /** Creates a new Builder for creating an {@link LogRecordData} instance. */ + public static Builder builder() { + return new AutoValue_TestExtendedLogRecordData.Builder() + .setResource(Resource.empty()) + .setInstrumentationScopeInfo(InstrumentationScopeInfo.empty()) + .setTimestamp(0, TimeUnit.NANOSECONDS) + .setObservedTimestamp(0, TimeUnit.NANOSECONDS) + .setSpanContext(SpanContext.getInvalid()) + .setSeverity(Severity.UNDEFINED_SEVERITY_NUMBER) + .setAttributes(Attributes.empty()) + .setTotalAttributeCount(0); + } + + @Deprecated + public io.opentelemetry.sdk.logs.data.Body getBody() { + Value valueBody = getBodyValue(); + return valueBody == null + ? io.opentelemetry.sdk.logs.data.Body.empty() + : io.opentelemetry.sdk.logs.data.Body.string(valueBody.asString()); + } + + /** + * {@inheritDoc} + * + * @since 1.42.0 + */ + @Override + @Nullable + public abstract Value getBodyValue(); + + TestExtendedLogRecordData() {} + + /** + * A {@code Builder} class for {@link TestExtendedLogRecordData}. + * + *
<p>
    This class is internal and experimental. Its APIs are unstable and can change at any time. + * Its APIs (or a version of them) may be promoted to the public stable API in the future, but no + * guarantees are made. + */ + @AutoValue.Builder + public abstract static class Builder { + + abstract TestExtendedLogRecordData autoBuild(); + + /** Create a new {@link LogRecordData} instance from the data in this. */ + public TestExtendedLogRecordData build() { + return autoBuild(); + } + + /** Set the {@link Resource}. */ + public abstract Builder setResource(Resource resource); + + /** Sets the {@link InstrumentationScopeInfo}. */ + public abstract Builder setInstrumentationScopeInfo( + InstrumentationScopeInfo instrumentationScopeInfo); + + public abstract Builder setEventName(String eventName); + + /** + * Set the epoch {@code timestamp}, using the instant. + * + *
<p>
    The {@code timestamp} is the time at which the log record occurred. + */ + public Builder setTimestamp(Instant instant) { + return setTimestampEpochNanos( + TimeUnit.SECONDS.toNanos(instant.getEpochSecond()) + instant.getNano()); + } + + /** + * Set the epoch {@code timestamp}, using the timestamp and unit. + * + *
<p>
    The {@code timestamp} is the time at which the log record occurred. + */ + public Builder setTimestamp(long timestamp, TimeUnit unit) { + return setTimestampEpochNanos(unit.toNanos(timestamp)); + } + + /** + * Set the epoch {@code timestamp}. + * + *
<p>
    The {@code timestamp} is the time at which the log record occurred. + */ + abstract Builder setTimestampEpochNanos(long epochNanos); + + /** + * Set the {@code observedTimestamp}, using the instant. + * + *
<p>
    The {@code observedTimestamp} is the time at which the log record was observed. + */ + public Builder setObservedTimestamp(Instant instant) { + return setObservedTimestampEpochNanos( + TimeUnit.SECONDS.toNanos(instant.getEpochSecond()) + instant.getNano()); + } + + /** + * Set the epoch {@code observedTimestamp}, using the timestamp and unit. + * + *
<p>
    The {@code observedTimestamp} is the time at which the log record was observed. + */ + public Builder setObservedTimestamp(long timestamp, TimeUnit unit) { + return setObservedTimestampEpochNanos(unit.toNanos(timestamp)); + } + + /** + * Set the epoch {@code observedTimestamp}. + * + *
<p>
    The {@code observedTimestamp} is the time at which the log record was observed. + */ + abstract Builder setObservedTimestampEpochNanos(long epochNanos); + + /** Set the span context. */ + public abstract Builder setSpanContext(SpanContext spanContext); + + /** Set the severity. */ + public abstract Builder setSeverity(Severity severity); + + /** Set the severity text. */ + public abstract Builder setSeverityText(String severityText); + + /** Set the body string. */ + public Builder setBody(String body) { + return setBodyValue(Value.of(body)); + } + + /** + * Set the body. + * + * @deprecated Use {@link #setBodyValue(Value)}. + */ + @Deprecated + public Builder setBody(io.opentelemetry.sdk.logs.data.Body body) { + if (body.getType() == io.opentelemetry.sdk.logs.data.Body.Type.STRING) { + setBodyValue(Value.of(body.asString())); + } else if (body.getType() == io.opentelemetry.sdk.logs.data.Body.Type.EMPTY) { + setBodyValue(null); + } + return this; + } + + /** + * Set the body. + * + * @since 1.42.0 + */ + public abstract Builder setBodyValue(@Nullable Value body); + + /** Set the attributes. */ + public abstract Builder setAttributes(Attributes attributes); + + /** Set the total attribute count. */ + public abstract Builder setTotalAttributeCount(int totalAttributeCount); + } +} diff --git a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/AttributeAssertionTest.java b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/AttributeAssertionTest.java index aa229eefb31..44c09e39af2 100644 --- a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/AttributeAssertionTest.java +++ b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/AttributeAssertionTest.java @@ -23,6 +23,9 @@ void nullAttr_errorMessageContainsAttrName() { .getAssertion() .accept(AttributeAssertion.attributeValueAssertion(key, null))) .isInstanceOf(AssertionError.class) - .hasMessage("[STRING attribute 'flib'] \nExpecting actual not to be null"); + .hasMessage( + "[STRING attribute 'flib'] " + + System.lineSeparator() + + "Expecting actual not to be null"); } } diff --git a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/LogAssertionsTest.java b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/LogAssertionsTest.java index 15da07a5214..165848c7f71 100644 --- a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/LogAssertionsTest.java +++ b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/LogAssertionsTest.java @@ -5,6 +5,8 @@ package io.opentelemetry.sdk.testing.assertj; +import static io.opentelemetry.api.common.AttributeKey.longKey; +import static io.opentelemetry.api.common.AttributeKey.stringKey; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; @@ -13,15 +15,22 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.KeyValue; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.logs.Logger; import io.opentelemetry.api.logs.Severity; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; import 
io.opentelemetry.sdk.logs.data.LogRecordData; +import io.opentelemetry.sdk.logs.export.SimpleLogRecordProcessor; import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.testing.logs.TestLogRecordData; +import io.opentelemetry.sdk.testing.exporter.InMemoryLogRecordExporter; +import io.opentelemetry.sdk.testing.logs.internal.TestExtendedLogRecordData; import java.util.Arrays; +import java.util.List; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; @@ -33,7 +42,7 @@ public class LogAssertionsTest { private static final String TRACE_ID = "00000000000000010000000000000002"; private static final String SPAN_ID = "0000000000000003"; - private static final AttributeKey DOG = AttributeKey.stringKey("dog"); + private static final AttributeKey DOG = stringKey("dog"); private static final Attributes ATTRIBUTES = Attributes.builder() .put("bear", "mya") @@ -47,9 +56,10 @@ public class LogAssertionsTest { .build(); private static final LogRecordData LOG_DATA = - TestLogRecordData.builder() + TestExtendedLogRecordData.builder() .setResource(RESOURCE) .setInstrumentationScopeInfo(INSTRUMENTATION_SCOPE_INFO) + .setEventName("event name") .setTimestamp(100, TimeUnit.NANOSECONDS) .setObservedTimestamp(200, TimeUnit.NANOSECONDS) .setSpanContext( @@ -79,7 +89,7 @@ void passing() { attributes -> assertThat(attributes) .hasSize(2) - .containsEntry(AttributeKey.stringKey("dog"), "bark") + .containsEntry(stringKey("dog"), "bark") .hasEntrySatisfying(DOG, value -> assertThat(value).hasSize(4)) .hasEntrySatisfying( AttributeKey.booleanKey("dog is cute"), @@ -97,6 +107,8 @@ void passing() { satisfies(DOG, val -> val.startsWith("bar")), satisfies(AttributeKey.booleanKey("dog is cute"), val -> val.isTrue()))) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) + // TODO (trask) once event name stabilizes + // .hasEventName("event name") .hasTimestamp(100) .hasObservedTimestamp(200) .hasSpanContext( @@ -118,14 +130,13 @@ void passing() { attributes -> OpenTelemetryAssertions.assertThat(attributes) .hasSize(8) - .containsEntry(AttributeKey.stringKey("bear"), "mya") - .hasEntrySatisfying( - AttributeKey.stringKey("bear"), value -> assertThat(value).hasSize(3)) + .containsEntry(stringKey("bear"), "mya") + .hasEntrySatisfying(stringKey("bear"), value -> assertThat(value).hasSize(3)) .containsEntry("bear", "mya") .containsEntry("warm", true) .containsEntry("temperature", 30) - .containsEntry(AttributeKey.longKey("temperature"), 30L) - .containsEntry(AttributeKey.longKey("temperature"), 30) + .containsEntry(longKey("temperature"), 30L) + .containsEntry(longKey("temperature"), 30) .containsEntry("length", 1.2) .containsEntry("colors", "red", "blue") .containsEntryWithStringValuesOf("colors", Arrays.asList("red", "blue")) @@ -135,7 +146,7 @@ void passing() { .containsEntryWithLongValuesOf("scores", Arrays.asList(0L, 1L)) .containsEntry("coins", 0.01, 0.05, 0.1) .containsEntryWithDoubleValuesOf("coins", Arrays.asList(0.01, 0.05, 0.1)) - .containsKey(AttributeKey.stringKey("bear")) + .containsKey(stringKey("bear")) .containsKey("bear") .containsOnly( attributeEntry("bear", "mya"), @@ -147,12 +158,12 @@ void passing() { attributeEntry("scores", 0L, 1L), attributeEntry("coins", 0.01, 0.05, 0.1))) .hasAttributesSatisfying( - equalTo(AttributeKey.stringKey("bear"), "mya"), + equalTo(stringKey("bear"), "mya"), equalTo(AttributeKey.booleanArrayKey("conditions"), Arrays.asList(false, true))) .hasAttributesSatisfyingExactly( - equalTo(AttributeKey.stringKey("bear"), "mya"), + 
equalTo(stringKey("bear"), "mya"), equalTo(AttributeKey.booleanKey("warm"), true), - equalTo(AttributeKey.longKey("temperature"), 30L), + equalTo(longKey("temperature"), 30L), equalTo(AttributeKey.doubleKey("length"), 1.2), equalTo(AttributeKey.stringArrayKey("colors"), Arrays.asList("red", "blue")), equalTo(AttributeKey.booleanArrayKey("conditions"), Arrays.asList(false, true)), @@ -228,7 +239,7 @@ void failure() { .hasAttributesSatisfying( attributes -> OpenTelemetryAssertions.assertThat(attributes) - .containsKey(AttributeKey.stringKey("cat")))) + .containsKey(stringKey("cat")))) .isInstanceOf(AssertionError.class); assertThatThrownBy( () -> @@ -256,24 +267,65 @@ void failure() { attributes -> OpenTelemetryAssertions.assertThat(attributes) .hasEntrySatisfying( - AttributeKey.stringKey("bear"), - value -> assertThat(value).hasSize(2)))) + stringKey("bear"), value -> assertThat(value).hasSize(2)))) .isInstanceOf(AssertionError.class); assertThatThrownBy( - () -> - assertThat(LOG_DATA) - .hasAttributesSatisfying(equalTo(AttributeKey.stringKey("bear"), "moo"))) + () -> assertThat(LOG_DATA).hasAttributesSatisfying(equalTo(stringKey("bear"), "moo"))) .isInstanceOf(AssertionError.class); assertThatThrownBy( () -> assertThat(LOG_DATA) .hasAttributesSatisfyingExactly( - equalTo(AttributeKey.stringKey("bear"), "mya"), + equalTo(stringKey("bear"), "mya"), equalTo(AttributeKey.booleanKey("warm"), true), - equalTo(AttributeKey.longKey("temperature"), 30L), + equalTo(longKey("temperature"), 30L), equalTo(AttributeKey.doubleKey("length"), 1.2))) .isInstanceOf(AssertionError.class); assertThatThrownBy(() -> assertThat(LOG_DATA).hasTotalAttributeCount(11)) .isInstanceOf(AssertionError.class); } + + @Test + void logBodyAssertions() { + InMemoryLogRecordExporter exporter = InMemoryLogRecordExporter.create(); + SdkLoggerProvider loggerProvider = + SdkLoggerProvider.builder() + .addLogRecordProcessor(SimpleLogRecordProcessor.create(exporter)) + .build(); + Logger logger = loggerProvider.get("test.test"); + logger + .logRecordBuilder() + .setBody( + Value.of( + KeyValue.of("foostr", Value.of("bar")), + KeyValue.of("foobool", Value.of(true)), + KeyValue.of("foolong", Value.of(12L)), + KeyValue.of("foodbl", Value.of(12.0)), + KeyValue.of( + "foostra", Value.of(Value.of("bar"), Value.of("baz"), Value.of("buzz"))), + KeyValue.of( + "foolonga", + Value.of(Value.of(9), Value.of(0), Value.of(2), Value.of(1), Value.of(0))), + KeyValue.of( + "foodbla", + Value.of( + Value.of(9.1), Value.of(0.2), Value.of(2.3), Value.of(1.4), Value.of(0.5))), + KeyValue.of( + "fooboola", + Value.of(Value.of(true), Value.of(true), Value.of(true), Value.of(false))), + KeyValue.of("fooany", Value.of("grim")))) + .emit(); + List logs = exporter.getFinishedLogRecordItems(); + assertThat(logs).hasSize(1); + assertThat(logs.get(0)) + .hasBodyField("foostr", "bar") + .hasBodyField("foobool", true) + .hasBodyField("foolong", 12L) + .hasBodyField("foodbl", 12.0) + .hasBodyField("foostra", "bar", "baz", "buzz") + .hasBodyField("foolonga", 9, 0, 2, 1, 0) + .hasBodyField("foodbla", 9.1, 0.2, 2.3, 1.4, 0.5) + .hasBodyField("fooboola", true, true, true, false) + .hasBodyField("fooany", Value.of("grim")); + } } diff --git a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/MetricAssertionsTest.java b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/MetricAssertionsTest.java index 7d9fba741ea..299e4ab36fa 100644 --- a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/MetricAssertionsTest.java +++ 
b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/MetricAssertionsTest.java @@ -5,6 +5,7 @@ package io.opentelemetry.sdk.testing.assertj; +import static io.opentelemetry.api.common.AttributeKey.stringKey; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; @@ -60,9 +61,9 @@ class MetricAssertionsTest { private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO = InstrumentationScopeInfo.builder("opentelemetry").setVersion("1.0").build(); - private static final AttributeKey DOG = AttributeKey.stringKey("dog"); - private static final AttributeKey BEAR = AttributeKey.stringKey("bear"); - private static final AttributeKey CAT = AttributeKey.stringKey("cat"); + private static final AttributeKey DOG = stringKey("dog"); + private static final AttributeKey BEAR = stringKey("bear"); + private static final AttributeKey CAT = stringKey("cat"); private static final AttributeKey WARM = AttributeKey.booleanKey("warm"); private static final AttributeKey TEMPERATURE = AttributeKey.longKey("temperature"); private static final AttributeKey LENGTH = AttributeKey.doubleKey("length"); @@ -317,7 +318,7 @@ void doubleGauge() { attributes -> assertThat(attributes) .hasSize(2) - .containsEntry(AttributeKey.stringKey("dog"), "bark") + .containsEntry(stringKey("dog"), "bark") .hasEntrySatisfying(DOG, value -> assertThat(value).hasSize(4)) .hasEntrySatisfying( AttributeKey.booleanKey("dog is cute"), @@ -454,7 +455,19 @@ void doubleGauge() { equalTo(CONDITIONS, Arrays.asList(false, true)), equalTo(SCORES, Arrays.asList(0L, 1L)), equalTo(COINS, Arrays.asList(0.01, 0.05, 0.1)), - satisfies(LENGTH, val -> val.isCloseTo(1, offset(0.3)))))); + satisfies(LENGTH, val -> val.isCloseTo(1, offset(0.3)))) + .hasAttributesSatisfying( + attributes -> + assertThat(attributes) + .hasSize(8) + .containsEntry(stringKey("bear"), "mya") + .containsEntry("warm", true) + .containsEntry("temperature", 30L) + .containsEntry("colors", "red", "blue") + .containsEntry("conditions", false, true) + .containsEntry("scores", 0L, 1L) + .containsEntry("coins", 0.01, 0.05, 0.1) + .containsEntry("length", 1.2)))); } @Test @@ -500,7 +513,7 @@ void doubleGaugeFailure() { resource.hasAttributesSatisfying( attributes -> assertThat(attributes) - .containsEntry(AttributeKey.stringKey("dog"), "meow")))) + .containsEntry(stringKey("dog"), "meow")))) .isInstanceOf(AssertionError.class); assertThatThrownBy( () -> @@ -774,6 +787,29 @@ void doubleGaugeFailure() { satisfies( COINS, val -> val.containsExactly(0.01, 0.05, 0.1)))))) .isInstanceOf(AssertionError.class); + assertThatThrownBy( + () -> + assertThat(DOUBLE_GAUGE_METRIC) + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> point.hasAttributes(Attributes.empty()), + point -> + point.hasAttributesSatisfying( + attributes -> + assertThat(attributes) + .hasSize(8) + .containsEntry( + stringKey("bear"), + "WRONG BEAR NAME") // Failed here + .containsEntry("warm", true) + .containsEntry("temperature", 30L) + .containsEntry("colors", "red", "blue") + .containsEntry("conditions", false, true) + .containsEntry("scores", 0L, 1L) + .containsEntry("coins", 0.01, 0.05, 0.1) + .containsEntry("length", 1.2))))) + .isInstanceOf(AssertionError.class); } // The above tests verify shared behavior in AbstractPointDataAssert and MetricDataAssert so we diff --git 
a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/TraceAssertionsTest.java b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/TraceAssertionsTest.java index 825f067fdce..feb5eb9462a 100644 --- a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/TraceAssertionsTest.java +++ b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/assertj/TraceAssertionsTest.java @@ -680,6 +680,16 @@ void hasSpansSatisfyingExactly() { trace -> trace.hasSpansSatisfyingExactly( span -> span.hasSpanId(SPAN_ID1), span -> span.hasSpanId(SPAN_ID2))); + // wrong number of spans + assertThatThrownBy( + () -> + TracesAssert.assertThat(traces) + .hasTracesSatisfyingExactly( + trace -> trace.hasSpansSatisfyingExactly(span -> span.hasSpanId(SPAN_ID1)))) + .isInstanceOf(AssertionError.class) + .hasMessageStartingWith( + "[Trace 0] " + System.lineSeparator() + "Expected size: 1 but was: 2"); + // test asserting spans in wrong oder assertThatThrownBy( () -> @@ -689,7 +699,9 @@ void hasSpansSatisfyingExactly() { trace.hasSpansSatisfyingExactly( span -> span.hasSpanId(SPAN_ID2), span -> span.hasSpanId(SPAN_ID1)))) - .isInstanceOf(AssertionError.class); + .isInstanceOf(AssertionError.class) + .hasMessage( + "[Span 0] Expected span [span1] to have span ID <0000000000000004> but was <0000000000000003>"); // test asserting spans in any order TracesAssert.assertThat(traces) diff --git a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/junit4/OpenTelemetryRuleTest.java b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/junit4/OpenTelemetryRuleTest.java index a9450c471fb..c37cc996f5d 100644 --- a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/junit4/OpenTelemetryRuleTest.java +++ b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/junit4/OpenTelemetryRuleTest.java @@ -7,10 +7,20 @@ import static org.assertj.core.api.Assertions.assertThat; +import io.opentelemetry.api.baggage.Baggage; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.logs.Logger; import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.context.propagation.TextMapSetter; import io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions; +import java.util.HashMap; +import java.util.Map; +import javax.annotation.Nullable; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -21,11 +31,13 @@ public class OpenTelemetryRuleTest { private Tracer tracer; private Meter meter; + private Logger logger; @Before public void setup() { tracer = otelTesting.getOpenTelemetry().getTracer("test"); meter = otelTesting.getOpenTelemetry().getMeter("test"); + logger = otelTesting.getOpenTelemetry().getLogsBridge().get("test"); } @Test @@ -83,4 +95,62 @@ public void getMetricsAgain() { .hasLongSumSatisfying( sum -> sum.hasPointsSatisfying(point -> point.hasValue(1)))); } + + @Test + public void getLogRecords() { + logger.logRecordBuilder().setBody("body").emit(); + + assertThat(otelTesting.getLogRecords()) + .singleElement() + .satisfies( + logRecordData -> assertThat(logRecordData.getBodyValue()).isEqualTo(Value.of("body"))); + // Logs cleared between tests, not when retrieving + assertThat(otelTesting.getLogRecords()) + .singleElement() + .satisfies( + logRecordData -> assertThat(logRecordData.getBodyValue()).isEqualTo(Value.of("body"))); + } + + // We have 
two tests to verify logs get cleared up between tests. + @Test + public void getLogRecordsAgain() { + logger.logRecordBuilder().setBody("body").emit(); + + assertThat(otelTesting.getLogRecords()) + .singleElement() + .satisfies( + logRecordData -> assertThat(logRecordData.getBodyValue()).isEqualTo(Value.of("body"))); + // Logs cleared between tests, not when retrieving + assertThat(otelTesting.getLogRecords()) + .singleElement() + .satisfies( + logRecordData -> assertThat(logRecordData.getBodyValue()).isEqualTo(Value.of("body"))); + } + + @Test + public void baggageAndTracePropagation() { + OpenTelemetryRule rule = OpenTelemetryRule.create(); + Span span = rule.getOpenTelemetry().getTracer("test").spanBuilder("test").startSpan(); + try (Scope baggageScope = Baggage.builder().put("key", "value").build().makeCurrent(); + Scope spanScope = span.makeCurrent()) { + Map carrier = new HashMap<>(); + rule.getOpenTelemetry() + .getPropagators() + .getTextMapPropagator() + .inject(Context.current(), carrier, new MapTextMapSetter()); + assertThat(carrier).containsEntry("baggage", "key=value"); + assertThat(carrier).containsKey("traceparent"); + } finally { + span.end(); + } + } + + public static class MapTextMapSetter implements TextMapSetter> { + @Override + public void set(@Nullable Map carrier, String key, String value) { + if (carrier != null) { + carrier.put(key, value); + } + } + } } diff --git a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/junit5/OpenTelemetryExtensionTest.java b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/junit5/OpenTelemetryExtensionTest.java index 3f658bf0724..f13c41e9878 100644 --- a/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/junit5/OpenTelemetryExtensionTest.java +++ b/sdk/testing/src/test/java/io/opentelemetry/sdk/testing/junit5/OpenTelemetryExtensionTest.java @@ -10,16 +10,24 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.baggage.Baggage; +import io.opentelemetry.api.common.Value; +import io.opentelemetry.api.logs.Logger; import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Context; import io.opentelemetry.context.Scope; +import io.opentelemetry.context.propagation.TextMapSetter; import io.opentelemetry.sdk.trace.data.SpanData; import java.util.Arrays; +import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import javax.annotation.Nullable; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -30,6 +38,7 @@ class OpenTelemetryExtensionTest { private final Tracer tracer = otelTesting.getOpenTelemetry().getTracer("test"); private final Meter meter = otelTesting.getOpenTelemetry().getMeter("test"); + private final Logger logger = otelTesting.getOpenTelemetry().getLogsBridge().get("test"); @Test public void getSpans() { @@ -175,6 +184,37 @@ void getMetricsAgain() { sum -> sum.hasPointsSatisfying(point -> point.hasValue(1)))); } + @Test + public void getLogRecords() { + logger.logRecordBuilder().setBody("body").emit(); + + assertThat(otelTesting.getLogRecords()) + .singleElement() + .satisfies( + logRecordData -> assertThat(logRecordData.getBodyValue()).isEqualTo(Value.of("body"))); + // Logs cleared between tests, not when retrieving + 
assertThat(otelTesting.getLogRecords()) + .singleElement() + .satisfies( + logRecordData -> assertThat(logRecordData.getBodyValue()).isEqualTo(Value.of("body"))); + } + + // We have two tests to verify spans get cleared up between tests. + @Test + public void getLogRecordsAgain() { + logger.logRecordBuilder().setBody("body").emit(); + + assertThat(otelTesting.getLogRecords()) + .singleElement() + .satisfies( + logRecordData -> assertThat(logRecordData.getBodyValue()).isEqualTo(Value.of("body"))); + // Logs cleared between tests, not when retrieving + assertThat(otelTesting.getLogRecords()) + .singleElement() + .satisfies( + logRecordData -> assertThat(logRecordData.getBodyValue()).isEqualTo(Value.of("body"))); + } + @Test void afterAll() { // Use a different instance of OpenTelemetryExtension to avoid interfering with other tests @@ -197,4 +237,32 @@ void afterAll() { assertThat(extension.getMetrics()).isEmpty(); assertThat(extension.getSpans()).isEmpty(); } + + @Test + void baggageAndTracePropagation() { + OpenTelemetryExtension extension = OpenTelemetryExtension.create(); + Span span = extension.getOpenTelemetry().getTracer("test").spanBuilder("test").startSpan(); + try (Scope baggageScope = Baggage.builder().put("key", "value").build().makeCurrent(); + Scope spanScope = span.makeCurrent()) { + Map carrier = new HashMap<>(); + extension + .getOpenTelemetry() + .getPropagators() + .getTextMapPropagator() + .inject(Context.current(), carrier, new MapTextMapSetter()); + assertThat(carrier).containsEntry("baggage", "key=value"); + assertThat(carrier).containsKey("traceparent"); + } finally { + span.end(); + } + } + + public static class MapTextMapSetter implements TextMapSetter> { + @Override + public void set(@Nullable Map carrier, String key, String value) { + if (carrier != null) { + carrier.put(key, value); + } + } + } } diff --git a/sdk/trace-shaded-deps/build.gradle.kts b/sdk/trace-shaded-deps/build.gradle.kts index e4cd9727909..1254aa3b600 100644 --- a/sdk/trace-shaded-deps/build.gradle.kts +++ b/sdk/trace-shaded-deps/build.gradle.kts @@ -1,7 +1,7 @@ plugins { id("otel.java-conventions") - id("com.github.johnrengelman.shadow") + id("com.gradleup.shadow") } // This project is not published, it is bundled into :sdk:trace diff --git a/sdk/trace-shaded-deps/src/main/java/io/opentelemetry/sdk/trace/internal/JcTools.java b/sdk/trace-shaded-deps/src/main/java/io/opentelemetry/sdk/trace/internal/JcTools.java index 06976ad13d5..fa009ebdc0f 100644 --- a/sdk/trace-shaded-deps/src/main/java/io/opentelemetry/sdk/trace/internal/JcTools.java +++ b/sdk/trace-shaded-deps/src/main/java/io/opentelemetry/sdk/trace/internal/JcTools.java @@ -64,21 +64,23 @@ public static long capacity(Queue queue) { * @throws IllegalArgumentException if maxExportBatchSize is negative */ @SuppressWarnings("unchecked") - public static void drain(Queue queue, int limit, Consumer consumer) { + public static int drain(Queue queue, int limit, Consumer consumer) { if (queue instanceof MessagePassingQueue) { - ((MessagePassingQueue) queue).drain(consumer::accept, limit); + return ((MessagePassingQueue) queue).drain(consumer::accept, limit); } else { - drainNonJcQueue(queue, limit, consumer); + return drainNonJcQueue(queue, limit, consumer); } } - private static void drainNonJcQueue( + private static int drainNonJcQueue( Queue queue, int maxExportBatchSize, Consumer consumer) { int polledCount = 0; T item; - while (polledCount++ < maxExportBatchSize && (item = queue.poll()) != null) { + while (polledCount < maxExportBatchSize && 
(item = queue.poll()) != null) { consumer.accept(item); + ++polledCount; } + return polledCount; } private JcTools() {} diff --git a/sdk/trace/build.gradle.kts b/sdk/trace/build.gradle.kts index 2f19cea68ec..35d30c8513b 100644 --- a/sdk/trace/build.gradle.kts +++ b/sdk/trace/build.gradle.kts @@ -22,6 +22,7 @@ dependencies { api(project(":api:all")) api(project(":sdk:common")) + compileOnly(project(":api:incubator")) compileOnly(project(":sdk:trace-shaded-deps")) annotationProcessor("com.google.auto.value:auto-value") @@ -39,7 +40,6 @@ dependencies { // dependencies. isTransitive = false } - jmh(project(":exporters:jaeger-thrift")) jmh(project(":exporters:otlp:all")) { // The opentelemetry-exporter-otlp depends on this project itself. So don't pull in // the transitive dependencies. @@ -49,6 +49,13 @@ dependencies { jmh(project(":exporters:otlp:common")) { isTransitive = false } + jmh(project(":exporters:common")) { + isTransitive = false + } + jmh(project(":exporters:sender:okhttp")) + jmh(project(":sdk-extensions:autoconfigure-spi")) { + isTransitive = false + } jmh("io.opentelemetry.proto:opentelemetry-proto") jmh("com.google.guava:guava") @@ -57,6 +64,23 @@ dependencies { jmh("org.testcontainers:testcontainers") // testContainer for OTLP collector } +testing { + suites { + register("testIncubating") { + dependencies { + implementation(project(":sdk:testing")) + implementation(project(":api:incubator")) + } + } + } +} + +tasks { + check { + dependsOn(testing.suites) + } +} + tasks { withType().configureEach { // We catch NoClassDefFoundError to fallback to non-jctools queues. diff --git a/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/ExceptionBenchmark.java b/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/ExceptionBenchmark.java index 438deb7b69d..7dd1298345b 100644 --- a/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/ExceptionBenchmark.java +++ b/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/ExceptionBenchmark.java @@ -25,6 +25,7 @@ @State(Scope.Benchmark) public class ExceptionBenchmark { + @SuppressWarnings("NonFinalStaticField") private static SpanBuilder spanBuilder; @Setup(Level.Trial) diff --git a/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/ExporterBenchmark.java b/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/ExporterBenchmark.java index 28b06257911..72abbe0541d 100644 --- a/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/ExporterBenchmark.java +++ b/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/ExporterBenchmark.java @@ -36,9 +36,8 @@ private ExporterBenchmark() {} @State(Scope.Benchmark) public abstract static class AbstractProcessorBenchmark { private static final DockerImageName OTLP_COLLECTOR_IMAGE = - DockerImageName.parse("otel/opentelemetry-collector-dev:latest"); + DockerImageName.parse("otel/opentelemetry-collector-contrib:latest"); protected static final int OTLP_PORT = 5678; - protected static final int JAEGER_PORT = 14268; private static final int HEALTH_CHECK_PORT = 13133; protected SdkSpanBuilder sdkSpanBuilder; @@ -49,7 +48,7 @@ public void setup() { // Configuring the collector test-container GenericContainer collector = new GenericContainer<>(OTLP_COLLECTOR_IMAGE) - .withExposedPorts(OTLP_PORT, HEALTH_CHECK_PORT, JAEGER_PORT) + .withExposedPorts(OTLP_PORT, HEALTH_CHECK_PORT) .waitingFor(Wait.forHttp("/").forPort(HEALTH_CHECK_PORT)) .withCopyFileToContainer( MountableFile.forClasspathResource("/otel.yaml"), "/etc/otel.yaml") @@ -92,17 +91,4 @@ protected OtlpGrpcSpanExporter createExporter(GenericContainer collector) { .build(); } } - 
- @SuppressWarnings("deprecation") // Benchmarking deprecated code - public static class JaegerBenchmark extends AbstractProcessorBenchmark { - @Override - protected io.opentelemetry.exporter.jaeger.thrift.JaegerThriftSpanExporter createExporter( - GenericContainer collector) { - String host = collector.getHost(); - int port = collector.getMappedPort(JAEGER_PORT); - return io.opentelemetry.exporter.jaeger.thrift.JaegerThriftSpanExporter.builder() - .setEndpoint("http://" + host + ":" + port + "/api/traces") - .build(); - } - } } diff --git a/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/SpanBenchmark.java b/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/SpanBenchmark.java index c8063e1bb07..cf0cf3737c8 100644 --- a/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/SpanBenchmark.java +++ b/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/SpanBenchmark.java @@ -24,7 +24,9 @@ @State(Scope.Benchmark) public class SpanBenchmark { + @SuppressWarnings("NonFinalStaticField") private static SdkSpanBuilder sdkSpanBuilder; + private final Resource serviceResource = Resource.create( Attributes.builder() diff --git a/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/SpanPipelineBenchmark.java b/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/SpanPipelineBenchmark.java index 03684d1a348..cbb9ee97b50 100644 --- a/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/SpanPipelineBenchmark.java +++ b/sdk/trace/src/jmh/java/io/opentelemetry/sdk/trace/SpanPipelineBenchmark.java @@ -38,7 +38,7 @@ private SpanPipelineBenchmark() {} @State(Scope.Benchmark) public abstract static class AbstractProcessorBenchmark { private static final DockerImageName OTLP_COLLECTOR_IMAGE = - DockerImageName.parse("otel/opentelemetry-collector-dev:latest"); + DockerImageName.parse("otel/opentelemetry-collector-contrib:latest"); private static final int EXPOSED_PORT = 5678; private static final int HEALTH_CHECK_PORT = 13133; private Tracer tracer; diff --git a/sdk/trace/src/jmh/resources/otel.yaml b/sdk/trace/src/jmh/resources/otel.yaml index 316ff8da1ef..b2fe534507f 100644 --- a/sdk/trace/src/jmh/resources/otel.yaml +++ b/sdk/trace/src/jmh/resources/otel.yaml @@ -3,9 +3,6 @@ receivers: protocols: grpc: endpoint: 0.0.0.0:5678 - jaeger: - protocols: - thrift_http: processors: batch: @@ -14,12 +11,12 @@ extensions: health_check: exporters: - logging: + debug: service: extensions: [health_check] pipelines: traces: - receivers: [otlp, jaeger] + receivers: [otlp] processors: [batch] - exporters: [logging] + exporters: [debug] diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ExtendedSdkSpanBuilder.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ExtendedSdkSpanBuilder.java new file mode 100644 index 00000000000..b68030202ce --- /dev/null +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ExtendedSdkSpanBuilder.java @@ -0,0 +1,159 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.incubator.propagation.ExtendedContextPropagators; +import io.opentelemetry.api.incubator.trace.ExtendedSpanBuilder; +import io.opentelemetry.api.incubator.trace.SpanCallable; +import io.opentelemetry.api.incubator.trace.SpanRunnable; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.StatusCode; +import 
io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; + +/** {@link ExtendedSdkSpanBuilder} is SDK implementation of {@link ExtendedSpanBuilder}. */ +final class ExtendedSdkSpanBuilder extends SdkSpanBuilder implements ExtendedSpanBuilder { + + ExtendedSdkSpanBuilder( + String spanName, + InstrumentationScopeInfo instrumentationScopeInfo, + TracerSharedState tracerSharedState, + SpanLimits spanLimits) { + super(spanName, instrumentationScopeInfo, tracerSharedState, spanLimits); + } + + @Override + public ExtendedSpanBuilder setParent(Context context) { + super.setParent(context); + return this; + } + + @Override + public ExtendedSpanBuilder setNoParent() { + super.setNoParent(); + return this; + } + + @Override + public ExtendedSpanBuilder setSpanKind(SpanKind spanKind) { + super.setSpanKind(spanKind); + return this; + } + + @Override + public ExtendedSpanBuilder addLink(SpanContext spanContext) { + super.addLink(spanContext); + return this; + } + + @Override + public ExtendedSpanBuilder addLink(SpanContext spanContext, Attributes attributes) { + super.addLink(spanContext, attributes); + return this; + } + + @Override + public ExtendedSpanBuilder setAttribute(String key, String value) { + super.setAttribute(key, value); + return this; + } + + @Override + public ExtendedSpanBuilder setAttribute(String key, long value) { + super.setAttribute(key, value); + return this; + } + + @Override + public ExtendedSpanBuilder setAttribute(String key, double value) { + super.setAttribute(key, value); + return this; + } + + @Override + public ExtendedSpanBuilder setAttribute(String key, boolean value) { + super.setAttribute(key, value); + return this; + } + + @Override + public ExtendedSpanBuilder setAttribute(AttributeKey key, T value) { + super.setAttribute(key, value); + return this; + } + + @Override + public ExtendedSpanBuilder setStartTimestamp(long startTimestamp, TimeUnit unit) { + super.setStartTimestamp(startTimestamp, unit); + return this; + } + + @Override + public ExtendedSpanBuilder setParentFrom( + ContextPropagators propagators, Map carrier) { + super.setParent( + ExtendedContextPropagators.extractTextMapPropagationContext(carrier, propagators)); + return this; + } + + @Override + public T startAndCall(SpanCallable spanCallable) throws E { + return startAndCall(spanCallable, ExtendedSdkSpanBuilder::setSpanError); + } + + @Override + public T startAndCall( + SpanCallable spanCallable, BiConsumer handleException) throws E { + Span span = startSpan(); + + //noinspection unused + try (Scope unused = span.makeCurrent()) { + return spanCallable.callInSpan(); + } catch (Throwable e) { + handleException.accept(span, e); + throw e; + } finally { + span.end(); + } + } + + @Override + public void startAndRun(SpanRunnable runnable) throws E { + startAndRun(runnable, ExtendedSdkSpanBuilder::setSpanError); + } + + @SuppressWarnings("NullAway") + @Override + public void startAndRun( + SpanRunnable runnable, BiConsumer handleException) throws E { + startAndCall( + () -> { + runnable.runInSpan(); + return null; + }, + handleException); + } + + /** + * Marks a span as error. This is the default exception handler. 
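(Aside, not part of the diff: a hedged usage sketch of the span-builder helpers implemented above. It assumes the incubator is on the classpath so that tracer.spanBuilder(...) returns the ExtendedSpanBuilder implemented here; processOrder and orderId are made-up names.)

    String result =
        ((ExtendedSpanBuilder) tracer.spanBuilder("process-order"))
            // runs the callable in a new current span; on exception the default handler
            // sets StatusCode.ERROR and records the exception before the span is ended
            .startAndCall(() -> processOrder(orderId));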
+ * + * @param span the span + * @param exception the exception that caused the error + */ + private static void setSpanError(Span span, Throwable exception) { + span.setStatus(StatusCode.ERROR); + span.recordException(exception); + } +} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ExtendedSdkTracer.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ExtendedSdkTracer.java new file mode 100644 index 00000000000..0c1d1c8e8b9 --- /dev/null +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ExtendedSdkTracer.java @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace; + +import io.opentelemetry.api.incubator.trace.ExtendedSpanBuilder; +import io.opentelemetry.api.incubator.trace.ExtendedTracer; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.trace.internal.TracerConfig; + +/** {@link ExtendedSdkTracer} is SDK implementation of {@link ExtendedTracer}. */ +final class ExtendedSdkTracer extends SdkTracer implements ExtendedTracer { + + ExtendedSdkTracer( + TracerSharedState sharedState, + InstrumentationScopeInfo instrumentationScopeInfo, + TracerConfig tracerConfig) { + super(sharedState, instrumentationScopeInfo, tracerConfig); + } + + @Override + public boolean isEnabled() { + return tracerEnabled; + } + + @Override + public ExtendedSpanBuilder spanBuilder(String spanName) { + return (ExtendedSpanBuilder) super.spanBuilder(spanName); + } +} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/IncubatingUtil.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/IncubatingUtil.java new file mode 100644 index 00000000000..c7ab15e9c2e --- /dev/null +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/IncubatingUtil.java @@ -0,0 +1,35 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace; + +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.trace.internal.TracerConfig; + +/** + * Utilities for interacting with {@code io.opentelemetry:opentelemetry-api-incubator}, which is not + * guaranteed to be present on the classpath. For all methods, callers MUST first separately + * reflectively confirm that the incubator is available on the classpath. 
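(Aside, not part of the diff: a minimal sketch of the reflective guard this contract asks callers to perform before touching IncubatingUtil. The probe class name is just one incubator-only type; any would do.)

    private static final boolean INCUBATOR_AVAILABLE;

    static {
      boolean available;
      try {
        Class.forName("io.opentelemetry.api.incubator.trace.ExtendedSpanBuilder");
        available = true;
      } catch (ClassNotFoundException e) {
        available = false;
      }
      INCUBATOR_AVAILABLE = available;
    }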
+ */ +final class IncubatingUtil { + + private IncubatingUtil() {} + + static SdkTracer createExtendedTracer( + TracerSharedState sharedState, + InstrumentationScopeInfo instrumentationScopeInfo, + TracerConfig tracerConfig) { + return new ExtendedSdkTracer(sharedState, instrumentationScopeInfo, tracerConfig); + } + + static SdkSpanBuilder createExtendedSpanBuilder( + String spanName, + InstrumentationScopeInfo instrumentationScopeInfo, + TracerSharedState tracerSharedState, + SpanLimits spanLimits) { + return new ExtendedSdkSpanBuilder( + spanName, instrumentationScopeInfo, tracerSharedState, spanLimits); + } +} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/MultiSpanProcessor.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/MultiSpanProcessor.java index 705351e83f5..65b37096568 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/MultiSpanProcessor.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/MultiSpanProcessor.java @@ -7,6 +7,7 @@ import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.internal.ExtendedSpanProcessor; import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -16,8 +17,9 @@ * Implementation of the {@code SpanProcessor} that simply forwards all received events to a list of * {@code SpanProcessor}s. */ -final class MultiSpanProcessor implements SpanProcessor { +final class MultiSpanProcessor implements ExtendedSpanProcessor { private final List spanProcessorsStart; + private final List spanProcessorsEnding; private final List spanProcessorsEnd; private final List spanProcessorsAll; private final AtomicBoolean isShutdown = new AtomicBoolean(false); @@ -35,9 +37,9 @@ static SpanProcessor create(List spanProcessorList) { } @Override - public void onStart(Context parentContext, ReadWriteSpan readableSpan) { + public void onStart(Context parentContext, ReadWriteSpan readWriteSpan) { for (SpanProcessor spanProcessor : spanProcessorsStart) { - spanProcessor.onStart(parentContext, readableSpan); + spanProcessor.onStart(parentContext, readWriteSpan); } } @@ -58,6 +60,18 @@ public boolean isEndRequired() { return !spanProcessorsEnd.isEmpty(); } + @Override + public void onEnding(ReadWriteSpan span) { + for (ExtendedSpanProcessor spanProcessor : spanProcessorsEnding) { + spanProcessor.onEnding(span); + } + } + + @Override + public boolean isOnEndingRequired() { + return !spanProcessorsEnding.isEmpty(); + } + @Override public CompletableResultCode shutdown() { if (isShutdown.getAndSet(true)) { @@ -83,10 +97,17 @@ private MultiSpanProcessor(List spanProcessors) { this.spanProcessorsAll = spanProcessors; this.spanProcessorsStart = new ArrayList<>(spanProcessorsAll.size()); this.spanProcessorsEnd = new ArrayList<>(spanProcessorsAll.size()); + this.spanProcessorsEnding = new ArrayList<>(spanProcessorsAll.size()); for (SpanProcessor spanProcessor : spanProcessorsAll) { if (spanProcessor.isStartRequired()) { spanProcessorsStart.add(spanProcessor); } + if (spanProcessor instanceof ExtendedSpanProcessor) { + ExtendedSpanProcessor extendedSpanProcessor = (ExtendedSpanProcessor) spanProcessor; + if (extendedSpanProcessor.isOnEndingRequired()) { + spanProcessorsEnding.add(extendedSpanProcessor); + } + } if (spanProcessor.isEndRequired()) { spanProcessorsEnd.add(spanProcessor); } @@ -98,6 +119,8 @@ public String toString() { return "MultiSpanProcessor{" + "spanProcessorsStart=" + spanProcessorsStart + + ", spanProcessorsEnding=" + + 
spanProcessorsEnding + ", spanProcessorsEnd=" + spanProcessorsEnd + ", spanProcessorsAll=" diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ReadableSpan.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ReadableSpan.java index f367aa67b28..5e6ef1cf962 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ReadableSpan.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/ReadableSpan.java @@ -6,6 +6,7 @@ package io.opentelemetry.sdk.trace; import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.SpanKind; @@ -111,4 +112,21 @@ default InstrumentationScopeInfo getInstrumentationScopeInfo() { */ @Nullable T getAttribute(AttributeKey key); + + /** + * Returns the Span attributes. + * + *
<p>
    Attributes can be changed during the lifetime of the Span by using {@link + * Span#setAttribute}} so this value cannot be cached. + * + *
<p>
    Note: the implementation of this method performs locking and returns an immutable copy to + * ensure thread-safe behavior. If you only need a single attribute it is better to call {@link + * #getAttribute(AttributeKey)}. + * + * @return the Span attributes, or {@link Attributes#empty()} if the span has no attributes. + * @since 1.38.0 + */ + default Attributes getAttributes() { + return Attributes.empty(); + } } diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkSpan.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkSpan.java index c6e72632547..c1c788bf931 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkSpan.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkSpan.java @@ -20,10 +20,13 @@ import io.opentelemetry.sdk.internal.InstrumentationScopeUtil; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.data.EventData; +import io.opentelemetry.sdk.trace.data.ExceptionEventData; import io.opentelemetry.sdk.trace.data.LinkData; import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.data.StatusData; -import io.opentelemetry.sdk.trace.internal.data.ExceptionEventData; +import io.opentelemetry.sdk.trace.internal.ExtendedSpanProcessor; +import java.io.PrintWriter; +import java.io.StringWriter; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -47,11 +50,6 @@ final class SdkSpan implements ReadWriteSpan { private final SpanContext parentSpanContext; // Handler called when the span starts and ends. private final SpanProcessor spanProcessor; - // The displayed name of the span. - // List of recorded links to parent and child spans. - private final List links; - // Number of links recorded. - private final int totalRecordedLinks; // The kind of the span. private final SpanKind kind; // The clock used to get the time. @@ -75,12 +73,23 @@ final class SdkSpan implements ReadWriteSpan { // List of recorded events. @GuardedBy("lock") - private final List events; + @Nullable + private List events; // Number of events recorded. @GuardedBy("lock") private int totalRecordedEvents = 0; + // The displayed name of the span. + // List of recorded links to parent and child spans. + @GuardedBy("lock") + @Nullable + List links; + + // Number of links recorded. + @GuardedBy("lock") + private int totalRecordedLinks; + // The status of the span. @GuardedBy("lock") private StatusData status = StatusData.unset(); @@ -89,9 +98,31 @@ final class SdkSpan implements ReadWriteSpan { @GuardedBy("lock") private long endEpochNanos; - // True if the span is ended. + private enum EndState { + NOT_ENDED, + ENDING, + ENDED + } + @GuardedBy("lock") - private boolean hasEnded; + private EndState hasEnded; + + /** + * The thread on which {@link #end()} is called and which will be invoking the {@link + * SpanProcessor}s. This field is used to ensure that only this thread may modify the span while + * it is in state {@link EndState#ENDING} to prevent concurrent updates outside of {@link + * ExtendedSpanProcessor#onEnding(ReadWriteSpan)}. 
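For context, a minimal sketch of how a span processor might use the `getAttributes()` accessor added above, alongside the single-key `getAttribute(AttributeKey)` path that the javadoc recommends when only one value is needed. The processor class name and the `http.route` key are illustrative, not part of this change.

```java
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.trace.ReadWriteSpan;
import io.opentelemetry.sdk.trace.ReadableSpan;
import io.opentelemetry.sdk.trace.SpanProcessor;

/** Hypothetical processor showing the two attribute accessors on ReadableSpan. */
final class AttributeLoggingProcessor implements SpanProcessor {

  private static final AttributeKey<String> HTTP_ROUTE = AttributeKey.stringKey("http.route");

  @Override
  public void onStart(Context parentContext, ReadWriteSpan span) {}

  @Override
  public boolean isStartRequired() {
    return false;
  }

  @Override
  public void onEnd(ReadableSpan span) {
    // Single key: avoids the lock-and-copy cost of getAttributes().
    String route = span.getAttribute(HTTP_ROUTE);
    // Whole set: an immutable snapshot (Attributes.empty() if nothing was recorded).
    Attributes snapshot = span.getAttributes();
    System.out.println(route + " -> " + snapshot.size() + " attribute(s)");
  }

  @Override
  public boolean isEndRequired() {
    return true;
  }
}
```

Because `getAttributes()` takes the span lock and copies, calling it once per `onEnd` and reusing the snapshot is cheaper than repeated calls.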
+ */ + @GuardedBy("lock") + @Nullable + private Thread spanEndingThread; + + private static final AttributeKey EXCEPTION_TYPE = + AttributeKey.stringKey("exception.type"); + private static final AttributeKey EXCEPTION_MESSAGE = + AttributeKey.stringKey("exception.message"); + private static final AttributeKey EXCEPTION_STACKTRACE = + AttributeKey.stringKey("exception.stacktrace"); private SdkSpan( SpanContext context, @@ -104,7 +135,7 @@ private SdkSpan( AnchoredClock clock, Resource resource, @Nullable AttributesMap attributes, - List links, + @Nullable List links, int totalRecordedLinks, long startEpochNanos) { this.context = context; @@ -116,11 +147,10 @@ private SdkSpan( this.kind = kind; this.spanProcessor = spanProcessor; this.resource = resource; - this.hasEnded = false; + this.hasEnded = EndState.NOT_ENDED; this.clock = clock; this.startEpochNanos = startEpochNanos; this.attributes = attributes; - this.events = new ArrayList<>(); this.spanLimits = spanLimits; } @@ -151,7 +181,7 @@ static SdkSpan startSpan( Clock tracerClock, Resource resource, @Nullable AttributesMap attributes, - List links, + @Nullable List links, int totalRecordedLinks, long userStartEpochNanos) { boolean createdAnchoredClock; @@ -194,7 +224,9 @@ static SdkSpan startSpan( startEpochNanos); // Call onStart here instead of calling in the constructor to make sure the span is completely // initialized. - spanProcessor.onStart(parentContext, span); + if (spanProcessor.isStartRequired()) { + spanProcessor.onStart(parentContext, span); + } return span; } @@ -204,15 +236,16 @@ public SpanData toSpanData() { synchronized (lock) { return SpanWrapper.create( this, - links, + getImmutableLinks(), getImmutableTimedEvents(), getImmutableAttributes(), (attributes == null) ? 0 : attributes.getTotalAddedValues(), totalRecordedEvents, + totalRecordedLinks, status, name, endEpochNanos, - hasEnded); + hasEnded == EndState.ENDED); } } @@ -224,10 +257,17 @@ public T getAttribute(AttributeKey key) { } } + @Override + public Attributes getAttributes() { + synchronized (lock) { + return attributes == null ? Attributes.empty() : attributes.immutableCopy(); + } + } + @Override public boolean hasEnded() { synchronized (lock) { - return hasEnded; + return hasEnded == EndState.ENDED; } } @@ -273,7 +313,7 @@ public InstrumentationScopeInfo getInstrumentationScopeInfo() { @Override public long getLatencyNanos() { synchronized (lock) { - return (hasEnded ? endEpochNanos : clock.now()) - startEpochNanos; + return (hasEnded == EndState.NOT_ENDED ? 
clock.now() : endEpochNanos) - startEpochNanos; } } @@ -288,7 +328,7 @@ public ReadWriteSpan setAttribute(AttributeKey key, T value) { return this; } synchronized (lock) { - if (hasEnded) { + if (!isModifiableByCurrentThread()) { logger.log(Level.FINE, "Calling setAttribute() on an ended Span."); return this; } @@ -303,6 +343,12 @@ public ReadWriteSpan setAttribute(AttributeKey key, T value) { return this; } + @GuardedBy("lock") + private boolean isModifiableByCurrentThread() { + return hasEnded == EndState.NOT_ENDED + || (hasEnded == EndState.ENDING && Thread.currentThread() == spanEndingThread); + } + @Override public ReadWriteSpan addEvent(String name) { if (name == null) { @@ -365,10 +411,13 @@ public ReadWriteSpan addEvent(String name, Attributes attributes, long timestamp private void addTimedEvent(EventData timedEvent) { synchronized (lock) { - if (hasEnded) { + if (!isModifiableByCurrentThread()) { logger.log(Level.FINE, "Calling addEvent() on an ended Span."); return; } + if (events == null) { + events = new ArrayList<>(); + } if (events.size() < spanLimits.getMaxNumberOfEvents()) { events.add(timedEvent); } @@ -382,10 +431,29 @@ public ReadWriteSpan setStatus(StatusCode statusCode, @Nullable String descripti return this; } synchronized (lock) { - if (hasEnded) { + if (!isModifiableByCurrentThread()) { logger.log(Level.FINE, "Calling setStatus() on an ended Span."); return this; } + + // If current status is OK, ignore further attempts to change it + if (this.status.getStatusCode() == StatusCode.OK) { + logger.log(Level.FINE, "Calling setStatus() on a Span that is already set to OK."); + return this; + } + + // Ignore attempts to set status to UNSET + if (statusCode == StatusCode.UNSET) { + logger.log(Level.FINE, "Ignoring call to setStatus() with status UNSET."); + return this; + } + + // Ignore description when status is not ERROR + if (description != null && statusCode != StatusCode.ERROR) { + logger.log(Level.FINE, "Ignoring setStatus() description since status is not ERROR."); + description = null; + } + this.status = StatusData.create(statusCode, description); } return this; @@ -398,6 +466,7 @@ public ReadWriteSpan recordException(Throwable exception) { } @Override + @SuppressWarnings("unchecked") public ReadWriteSpan recordException(Throwable exception, Attributes additionalAttributes) { if (exception == null) { return this; @@ -406,8 +475,32 @@ public ReadWriteSpan recordException(Throwable exception, Attributes additionalA additionalAttributes = Attributes.empty(); } + AttributesMap attributes = + AttributesMap.create( + spanLimits.getMaxNumberOfAttributes(), spanLimits.getMaxAttributeValueLength()); + String exceptionName = exception.getClass().getCanonicalName(); + String exceptionMessage = exception.getMessage(); + StringWriter stringWriter = new StringWriter(); + try (PrintWriter printWriter = new PrintWriter(stringWriter)) { + exception.printStackTrace(printWriter); + } + String stackTrace = stringWriter.toString(); + + if (exceptionName != null) { + attributes.put(EXCEPTION_TYPE, exceptionName); + } + if (exceptionMessage != null) { + attributes.put(EXCEPTION_MESSAGE, exceptionMessage); + } + if (stackTrace != null) { + attributes.put(EXCEPTION_STACKTRACE, stackTrace); + } + + additionalAttributes.forEach(attributes::put); + addTimedEvent( - ExceptionEventData.create(spanLimits, clock.now(), exception, additionalAttributes)); + ExceptionEventData.create( + clock.now(), exception, attributes, attributes.getTotalAddedValues())); return this; } @@ -417,7 +510,7 @@ 
public ReadWriteSpan updateName(String name) { return this; } synchronized (lock) { - if (hasEnded) { + if (!isModifiableByCurrentThread()) { logger.log(Level.FINE, "Calling updateName() on an ended Span."); return this; } @@ -426,6 +519,37 @@ public ReadWriteSpan updateName(String name) { return this; } + @Override + public Span addLink(SpanContext spanContext, Attributes attributes) { + if (spanContext == null || !spanContext.isValid()) { + return this; + } + if (attributes == null) { + attributes = Attributes.empty(); + } + LinkData link = + LinkData.create( + spanContext, + AttributeUtil.applyAttributesLimit( + attributes, + spanLimits.getMaxNumberOfAttributesPerLink(), + spanLimits.getMaxAttributeValueLength())); + synchronized (lock) { + if (!isModifiableByCurrentThread()) { + logger.log(Level.FINE, "Calling addLink() on an ended Span."); + return this; + } + if (links == null) { + links = new ArrayList<>(); + } + if (links.size() < spanLimits.getMaxNumberOfLinks()) { + links.add(link); + } + totalRecordedLinks++; + } + return this; + } + @Override public void end() { endInternal(clock.now()); @@ -441,20 +565,32 @@ public void end(long timestamp, TimeUnit unit) { private void endInternal(long endEpochNanos) { synchronized (lock) { - if (hasEnded) { - logger.log(Level.FINE, "Calling end() on an ended Span."); + if (hasEnded != EndState.NOT_ENDED) { + logger.log(Level.FINE, "Calling end() on an ended or ending Span."); return; } this.endEpochNanos = endEpochNanos; - hasEnded = true; + spanEndingThread = Thread.currentThread(); + hasEnded = EndState.ENDING; + } + if (spanProcessor instanceof ExtendedSpanProcessor) { + ExtendedSpanProcessor extendedSpanProcessor = (ExtendedSpanProcessor) spanProcessor; + if (extendedSpanProcessor.isOnEndingRequired()) { + extendedSpanProcessor.onEnding(this); + } + } + synchronized (lock) { + hasEnded = EndState.ENDED; + } + if (spanProcessor.isEndRequired()) { + spanProcessor.onEnd(this); } - spanProcessor.onEnd(this); } @Override public boolean isRecording() { synchronized (lock) { - return !hasEnded; + return hasEnded != EndState.ENDED; } } @@ -471,19 +607,15 @@ long getStartEpochNanos() { return startEpochNanos; } - int getTotalRecordedLinks() { - return totalRecordedLinks; - } - @GuardedBy("lock") private List getImmutableTimedEvents() { - if (events.isEmpty()) { + if (events == null) { return Collections.emptyList(); } // if the span has ended, then the events are unmodifiable // so we can return them directly and save copying all the data. - if (hasEnded) { + if (hasEnded == EndState.ENDED) { return Collections.unmodifiableList(events); } @@ -497,13 +629,21 @@ private Attributes getImmutableAttributes() { } // if the span has ended, then the attributes are unmodifiable, // so we can return them directly and save copying all the data. - if (hasEnded) { + if (hasEnded == EndState.ENDED) { return attributes; } // otherwise, make a copy of the data into an immutable container. 
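The `endInternal` rewrite above introduces an intermediate `ENDING` state: `onEnding` runs on the thread that called `end()` while the span is still writable, and only afterwards does the span become `ENDED` and get handed to `onEnd`. Below is a sketch of a processor that takes advantage of this, assuming the `ExtendedSpanProcessor` interface added later in this diff; the class name and attribute key are hypothetical.

```java
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.trace.ReadWriteSpan;
import io.opentelemetry.sdk.trace.ReadableSpan;
import io.opentelemetry.sdk.trace.internal.ExtendedSpanProcessor;

/** Hypothetical processor that stamps a final attribute while the span is still mutable. */
final class EndTimestampingProcessor implements ExtendedSpanProcessor {

  // Illustrative key, not a semantic-convention attribute.
  private static final AttributeKey<Long> LATENCY_NANOS =
      AttributeKey.longKey("example.latency_nanos");

  @Override
  public void onStart(Context parentContext, ReadWriteSpan span) {}

  @Override
  public boolean isStartRequired() {
    return false;
  }

  @Override
  public void onEnding(ReadWriteSpan span) {
    // Runs in state ENDING on the thread that called end(); writes are still allowed here.
    span.setAttribute(LATENCY_NANOS, span.getLatencyNanos());
  }

  @Override
  public boolean isOnEndingRequired() {
    return true;
  }

  @Override
  public void onEnd(ReadableSpan span) {
    // By now the span is ENDED and further writes would be ignored.
  }

  @Override
  public boolean isEndRequired() {
    return false;
  }
}
```

Writes attempted from other threads while the span is `ENDING` are rejected by `isModifiableByCurrentThread()`, so the attribute set seen by `onEnd` is exactly what `onEnding` left behind.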
return attributes.immutableCopy(); } + @GuardedBy("lock") + private List getImmutableLinks() { + if (links == null || links.isEmpty()) { + return Collections.emptyList(); + } + return Collections.unmodifiableList(links); + } + @Override public String toString() { String name; @@ -511,12 +651,14 @@ public String toString() { String status; long totalRecordedEvents; long endEpochNanos; + long totalRecordedLinks; synchronized (lock) { name = this.name; attributes = String.valueOf(this.attributes); status = String.valueOf(this.status); totalRecordedEvents = this.totalRecordedEvents; endEpochNanos = this.endEpochNanos; + totalRecordedLinks = this.totalRecordedLinks; } return "SdkSpan{traceId=" + context.getTraceId() diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkSpanBuilder.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkSpanBuilder.java index 4d6fca7fe41..b12107c7d43 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkSpanBuilder.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkSpanBuilder.java @@ -33,7 +33,7 @@ import javax.annotation.Nullable; /** {@link SdkSpanBuilder} is SDK implementation of {@link SpanBuilder}. */ -final class SdkSpanBuilder implements SpanBuilder { +class SdkSpanBuilder implements SpanBuilder { private final String spanName; private final InstrumentationScopeInfo instrumentationScopeInfo; @@ -179,8 +179,9 @@ public Span startSpan() { // New child span. traceId = parentSpanContext.getTraceId(); } + List currentLinks = links; List immutableLinks = - links == null ? Collections.emptyList() : Collections.unmodifiableList(links); + currentLinks == null ? Collections.emptyList() : Collections.unmodifiableList(currentLinks); // Avoid any possibility to modify the links list by adding links to the Builder after the // startSpan is called. If that happens all the links will be added in a new list. links = null; @@ -228,7 +229,7 @@ public Span startSpan() { tracerSharedState.getClock(), tracerSharedState.getResource(), recordedAttributes, - immutableLinks, + currentLinks, totalNumberOfLinksAdded, startEpochNanos); } diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracer.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracer.java index 81282097cd1..b148d0cf8c6 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracer.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracer.java @@ -9,34 +9,82 @@ import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.api.trace.TracerProvider; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.trace.internal.TracerConfig; /** {@link SdkTracer} is SDK implementation of {@link Tracer}. 
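The `addLink(SpanContext, Attributes)` override added to `SdkSpan` above makes it possible to attach links after a span has started, with the per-link attribute limit and `maxNumberOfLinks` applied at call time. A caller-side sketch, assuming a messaging-style batch scenario; the span name and attribute key are illustrative.

```java
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.api.trace.Tracer;
import java.util.List;

final class BatchReceiveExample {

  // Links each received message's context after the processing span has already started.
  static void process(Tracer tracer, List<SpanContext> messageContexts) {
    Span span = tracer.spanBuilder("process-batch").startSpan();
    try {
      for (SpanContext linked : messageContexts) {
        // Invalid contexts are ignored; link-count and per-link attribute limits apply.
        span.addLink(linked, Attributes.builder().put("messaging.operation", "process").build());
      }
      // ... handle the batch ...
    } finally {
      span.end();
    }
  }
}
```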
*/ -final class SdkTracer implements Tracer { +class SdkTracer implements Tracer { static final String FALLBACK_SPAN_NAME = ""; + private static final Tracer NOOP_TRACER = TracerProvider.noop().get("noop"); + private static final boolean INCUBATOR_AVAILABLE; + + static { + boolean incubatorAvailable = false; + try { + Class.forName("io.opentelemetry.api.incubator.trace.ExtendedDefaultTracerProvider"); + incubatorAvailable = true; + } catch (ClassNotFoundException e) { + // Not available + } + INCUBATOR_AVAILABLE = incubatorAvailable; + } private final TracerSharedState sharedState; private final InstrumentationScopeInfo instrumentationScopeInfo; + // deliberately not volatile because of performance concerns + // - which means its eventually consistent + protected boolean tracerEnabled; - SdkTracer(TracerSharedState sharedState, InstrumentationScopeInfo instrumentationScopeInfo) { + SdkTracer( + TracerSharedState sharedState, + InstrumentationScopeInfo instrumentationScopeInfo, + TracerConfig tracerConfig) { this.sharedState = sharedState; this.instrumentationScopeInfo = instrumentationScopeInfo; + this.tracerEnabled = tracerConfig.isEnabled(); } + static SdkTracer create( + TracerSharedState sharedState, + InstrumentationScopeInfo instrumentationScopeInfo, + TracerConfig tracerConfig) { + return INCUBATOR_AVAILABLE + ? IncubatingUtil.createExtendedTracer(sharedState, instrumentationScopeInfo, tracerConfig) + : new SdkTracer(sharedState, instrumentationScopeInfo, tracerConfig); + } + + /** + * Note that {@link ExtendedSdkTracer#spanBuilder(String)} calls this and depends on it returning + * {@link ExtendedSdkTracer} in all cases when the incubator is present. + */ @Override public SpanBuilder spanBuilder(String spanName) { + if (!tracerEnabled) { + return NOOP_TRACER.spanBuilder(spanName); + } if (spanName == null || spanName.trim().isEmpty()) { spanName = FALLBACK_SPAN_NAME; } if (sharedState.hasBeenShutdown()) { - Tracer tracer = TracerProvider.noop().get(instrumentationScopeInfo.getName()); - return tracer.spanBuilder(spanName); + return NOOP_TRACER.spanBuilder(spanName); } - return new SdkSpanBuilder( - spanName, instrumentationScopeInfo, sharedState, sharedState.getSpanLimits()); + return INCUBATOR_AVAILABLE + ? 
IncubatingUtil.createExtendedSpanBuilder( + spanName, instrumentationScopeInfo, sharedState, sharedState.getSpanLimits()) + : new SdkSpanBuilder( + spanName, instrumentationScopeInfo, sharedState, sharedState.getSpanLimits()); } // Visible for testing InstrumentationScopeInfo getInstrumentationScopeInfo() { return instrumentationScopeInfo; } + + // Visible for testing + boolean isEnabled() { + return tracerEnabled; + } + + void updateTracerConfig(TracerConfig tracerConfig) { + this.tracerEnabled = tracerConfig.isEnabled(); + } } diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracerProvider.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracerProvider.java index 2e07af579a9..7bd69a24236 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracerProvider.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracerProvider.java @@ -10,8 +10,12 @@ import io.opentelemetry.api.trace.TracerProvider; import io.opentelemetry.sdk.common.Clock; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.internal.ComponentRegistry; +import io.opentelemetry.sdk.internal.ScopeConfigurator; import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.internal.SdkTracerProviderUtil; +import io.opentelemetry.sdk.trace.internal.TracerConfig; import io.opentelemetry.sdk.trace.samplers.Sampler; import java.io.Closeable; import java.util.List; @@ -27,6 +31,9 @@ public final class SdkTracerProvider implements TracerProvider, Closeable { static final String DEFAULT_TRACER_NAME = ""; private final TracerSharedState sharedState; private final ComponentRegistry tracerSdkComponentRegistry; + // deliberately not volatile because of performance concerns + // - which means its eventually consistent + private ScopeConfigurator tracerConfigurator; /** * Returns a new {@link SdkTracerProviderBuilder} for {@link SdkTracerProvider}. @@ -37,19 +44,31 @@ public static SdkTracerProviderBuilder builder() { return new SdkTracerProviderBuilder(); } + @SuppressWarnings("NonApiType") SdkTracerProvider( Clock clock, IdGenerator idsGenerator, Resource resource, Supplier spanLimitsSupplier, Sampler sampler, - List spanProcessors) { + List spanProcessors, + ScopeConfigurator tracerConfigurator) { this.sharedState = new TracerSharedState( clock, idsGenerator, resource, spanLimitsSupplier, sampler, spanProcessors); this.tracerSdkComponentRegistry = new ComponentRegistry<>( - instrumentationScopeInfo -> new SdkTracer(sharedState, instrumentationScopeInfo)); + instrumentationScopeInfo -> + SdkTracer.create( + sharedState, + instrumentationScopeInfo, + getTracerConfig(instrumentationScopeInfo))); + this.tracerConfigurator = tracerConfigurator; + } + + private TracerConfig getTracerConfig(InstrumentationScopeInfo instrumentationScopeInfo) { + TracerConfig tracerConfig = tracerConfigurator.apply(instrumentationScopeInfo); + return tracerConfig == null ? TracerConfig.defaultConfig() : tracerConfig; } @Override @@ -84,6 +103,25 @@ public Sampler getSampler() { return sharedState.getSampler(); } + /** + * Updates the tracer configurator, which computes {@link TracerConfig} for each {@link + * InstrumentationScopeInfo}. + * + *
<p>
    This method is experimental so not public. You may reflectively call it using {@link + * SdkTracerProviderUtil#setTracerConfigurator(SdkTracerProvider, ScopeConfigurator)}. + * + * @see TracerConfig#configuratorBuilder() + */ + void setTracerConfigurator(ScopeConfigurator tracerConfigurator) { + this.tracerConfigurator = tracerConfigurator; + this.tracerSdkComponentRegistry + .getComponents() + .forEach( + sdkTracer -> + sdkTracer.updateTracerConfig( + getTracerConfig(sdkTracer.getInstrumentationScopeInfo()))); + } + /** * Attempts to stop all the activity for {@link Tracer}s created by this provider. Calls {@link * SpanProcessor#shutdown()} for all registered {@link SpanProcessor}s. diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracerProviderBuilder.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracerProviderBuilder.java index 20dd76536d5..531cd1a6384 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracerProviderBuilder.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SdkTracerProviderBuilder.java @@ -8,11 +8,17 @@ import static java.util.Objects.requireNonNull; import io.opentelemetry.sdk.common.Clock; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder; import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.internal.SdkTracerProviderUtil; +import io.opentelemetry.sdk.trace.internal.TracerConfig; import io.opentelemetry.sdk.trace.samplers.Sampler; import java.util.ArrayList; import java.util.List; import java.util.Objects; +import java.util.function.Predicate; import java.util.function.Supplier; /** Builder of {@link SdkTracerProvider}. */ @@ -26,6 +32,8 @@ public final class SdkTracerProviderBuilder { private Resource resource = Resource.getDefault(); private Supplier spanLimitsSupplier = SpanLimits::getDefault; private Sampler sampler = DEFAULT_SAMPLER; + private ScopeConfiguratorBuilder tracerConfiguratorBuilder = + TracerConfig.configuratorBuilder(); /** * Assign a {@link Clock}. {@link Clock} will be used each time a {@link @@ -147,6 +155,47 @@ public SdkTracerProviderBuilder addSpanProcessor(SpanProcessor spanProcessor) { return this; } + /** + * Set the tracer configurator, which computes {@link TracerConfig} for each {@link + * InstrumentationScopeInfo}. + * + *
<p>
    This method is experimental so not public. You may reflectively call it using {@link + * SdkTracerProviderUtil#setTracerConfigurator(SdkTracerProviderBuilder, ScopeConfigurator)}. + * + *
<p>
    Overrides any matchers added via {@link #addTracerConfiguratorCondition(Predicate, + * TracerConfig)}. + * + * @see TracerConfig#configuratorBuilder() + */ + SdkTracerProviderBuilder setTracerConfigurator( + ScopeConfigurator tracerConfigurator) { + this.tracerConfiguratorBuilder = tracerConfigurator.toBuilder(); + return this; + } + + /** + * Adds a condition to the tracer configurator, which computes {@link TracerConfig} for each + * {@link InstrumentationScopeInfo}. + * + *
<p>
    This method is experimental so not public. You may reflectively call it using {@link + * SdkTracerProviderUtil#addTracerConfiguratorCondition(SdkTracerProviderBuilder, Predicate, + * TracerConfig)}. + * + *
<p>
    Applies after any previously added conditions. + * + *
<p>
    If {@link #setTracerConfigurator(ScopeConfigurator)} was previously called, this condition + * will only be applied if the {@link ScopeConfigurator#apply(Object)} returns null for the + * matched {@link InstrumentationScopeInfo}(s). + * + * @see ScopeConfiguratorBuilder#nameEquals(String) + * @see ScopeConfiguratorBuilder#nameMatchesGlob(String) + */ + SdkTracerProviderBuilder addTracerConfiguratorCondition( + Predicate scopeMatcher, TracerConfig tracerConfig) { + this.tracerConfiguratorBuilder.addCondition(scopeMatcher, tracerConfig); + return this; + } + /** * Create a new {@link SdkTracerProvider} instance with the configuration. * @@ -154,7 +203,13 @@ public SdkTracerProviderBuilder addSpanProcessor(SpanProcessor spanProcessor) { */ public SdkTracerProvider build() { return new SdkTracerProvider( - clock, idsGenerator, resource, spanLimitsSupplier, sampler, spanProcessors); + clock, + idsGenerator, + resource, + spanLimitsSupplier, + sampler, + spanProcessors, + tracerConfiguratorBuilder.build()); } SdkTracerProviderBuilder() {} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SpanLimitsBuilder.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SpanLimitsBuilder.java index 14b5f06f1a6..353bc19b044 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SpanLimitsBuilder.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SpanLimitsBuilder.java @@ -34,7 +34,7 @@ public final class SpanLimitsBuilder { * @throws IllegalArgumentException if {@code maxNumberOfAttributes} is not positive. */ public SpanLimitsBuilder setMaxNumberOfAttributes(int maxNumberOfAttributes) { - Utils.checkArgument(maxNumberOfAttributes > 0, "maxNumberOfAttributes must be greater than 0"); + Utils.checkArgument(maxNumberOfAttributes >= 0, "maxNumberOfAttributes must be non-negative"); this.maxNumAttributes = maxNumberOfAttributes; return this; } @@ -47,7 +47,7 @@ public SpanLimitsBuilder setMaxNumberOfAttributes(int maxNumberOfAttributes) { * @throws IllegalArgumentException if {@code maxNumberOfEvents} is not positive. */ public SpanLimitsBuilder setMaxNumberOfEvents(int maxNumberOfEvents) { - Utils.checkArgument(maxNumberOfEvents > 0, "maxNumberOfEvents must be greater than 0"); + Utils.checkArgument(maxNumberOfEvents >= 0, "maxNumberOfEvents must be non-negative"); this.maxNumEvents = maxNumberOfEvents; return this; } @@ -60,7 +60,7 @@ public SpanLimitsBuilder setMaxNumberOfEvents(int maxNumberOfEvents) { * @throws IllegalArgumentException if {@code maxNumberOfLinks} is not positive. 
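The configurator hooks added to `SdkTracerProvider` and `SdkTracerProviderBuilder` above stay package-private and are reached reflectively through `SdkTracerProviderUtil` (added later in this diff). A sketch of disabling tracing for one instrumentation scope; the scope name `my.noisy.library` is purely illustrative, and the API is internal and experimental.

```java
import io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder;
import io.opentelemetry.sdk.trace.internal.SdkTracerProviderUtil;
import io.opentelemetry.sdk.trace.internal.TracerConfig;

final class TracerConfigExample {

  static SdkTracerProvider buildProvider() {
    SdkTracerProviderBuilder builder = SdkTracerProvider.builder();
    // Tracers whose scope name matches are handed TracerConfig.disabled(),
    // so spanBuilder(...) returns no-op builders for that scope.
    SdkTracerProviderUtil.addTracerConfiguratorCondition(
        builder, ScopeConfiguratorBuilder.nameEquals("my.noisy.library"), TracerConfig.disabled());
    return builder.build();
  }
}
```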
*/ public SpanLimitsBuilder setMaxNumberOfLinks(int maxNumberOfLinks) { - Utils.checkArgument(maxNumberOfLinks > 0, "maxNumberOfLinks must be greater than 0"); + Utils.checkArgument(maxNumberOfLinks >= 0, "maxNumberOfLinks must be non-negative"); this.maxNumLinks = maxNumberOfLinks; return this; } @@ -74,7 +74,7 @@ public SpanLimitsBuilder setMaxNumberOfLinks(int maxNumberOfLinks) { */ public SpanLimitsBuilder setMaxNumberOfAttributesPerEvent(int maxNumberOfAttributesPerEvent) { Utils.checkArgument( - maxNumberOfAttributesPerEvent > 0, "maxNumberOfAttributesPerEvent must be greater than 0"); + maxNumberOfAttributesPerEvent >= 0, "maxNumberOfAttributesPerEvent must be non-negative"); this.maxNumAttributesPerEvent = maxNumberOfAttributesPerEvent; return this; } @@ -88,7 +88,7 @@ public SpanLimitsBuilder setMaxNumberOfAttributesPerEvent(int maxNumberOfAttribu */ public SpanLimitsBuilder setMaxNumberOfAttributesPerLink(int maxNumberOfAttributesPerLink) { Utils.checkArgument( - maxNumberOfAttributesPerLink > 0, "maxNumberOfAttributesPerLink must be greater than 0"); + maxNumberOfAttributesPerLink >= 0, "maxNumberOfAttributesPerLink must be non-negative"); this.maxNumAttributesPerLink = maxNumberOfAttributesPerLink; return this; } @@ -104,7 +104,7 @@ public SpanLimitsBuilder setMaxNumberOfAttributesPerLink(int maxNumberOfAttribut */ public SpanLimitsBuilder setMaxAttributeValueLength(int maxAttributeValueLength) { Utils.checkArgument( - maxAttributeValueLength > -1, "maxAttributeValueLength must be non-negative"); + maxAttributeValueLength >= 0, "maxAttributeValueLength must be non-negative"); this.maxAttributeValueLength = maxAttributeValueLength; return this; } diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SpanWrapper.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SpanWrapper.java index 962601b1d58..f0d148af5c2 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SpanWrapper.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/SpanWrapper.java @@ -43,6 +43,8 @@ abstract class SpanWrapper implements SpanData { abstract int totalRecordedEvents(); + abstract int totalRecordedLinks(); + abstract StatusData status(); abstract String name(); @@ -62,6 +64,7 @@ static SpanWrapper create( Attributes attributes, int totalAttributeCount, int totalRecordedEvents, + int totalRecordedLinks, StatusData status, String name, long endEpochNanos, @@ -73,6 +76,7 @@ static SpanWrapper create( attributes, totalAttributeCount, totalRecordedEvents, + totalRecordedLinks, status, name, endEpochNanos, @@ -158,7 +162,7 @@ public int getTotalRecordedEvents() { @Override public int getTotalRecordedLinks() { - return delegate().getTotalRecordedLinks(); + return totalRecordedLinks(); } @Override diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/TracerSharedState.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/TracerSharedState.java index 99cd1ea26c0..3d07a2853f8 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/TracerSharedState.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/TracerSharedState.java @@ -15,6 +15,7 @@ // Represents the shared state/config between all Tracers created by the same TracerProvider. 
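With the relaxed `SpanLimitsBuilder` checks above, the per-span counts may now be set to zero, which effectively discards the corresponding data while keeping the span itself. A minimal sketch:

```java
import io.opentelemetry.sdk.trace.SpanLimits;

final class SpanLimitsExample {

  // Zero is now accepted: spans keep no events and no links, only attributes (up to 16 here).
  static final SpanLimits NO_EVENTS_OR_LINKS =
      SpanLimits.builder()
          .setMaxNumberOfAttributes(16)
          .setMaxNumberOfEvents(0)
          .setMaxNumberOfLinks(0)
          .build();
}
```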
final class TracerSharedState { + private final Object lock = new Object(); private final Clock clock; private final IdGenerator idGenerator; diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/data/ExceptionEventData.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/data/ExceptionEventData.java new file mode 100644 index 00000000000..c5170e2485a --- /dev/null +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/data/ExceptionEventData.java @@ -0,0 +1,40 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace.data; + +import io.opentelemetry.api.common.Attributes; +import javax.annotation.concurrent.Immutable; + +/** + * Data representation of an event for a recorded exception. + * + * @since 1.44.0 + */ +@Immutable +public interface ExceptionEventData extends EventData { + + /** + * Returns a new immutable {@link ExceptionEventData}. + * + * @param epochNanos epoch timestamp in nanos of the {@link ExceptionEventData}. + * @param exception the {@link Throwable exception} of the {@code Event}. + * @param attributes the additional attributes of the {@link ExceptionEventData}. + * @param totalAttributeCount the total number of attributes for this {@code} Event. + * @return a new immutable {@link ExceptionEventData} + */ + static ExceptionEventData create( + long epochNanos, Throwable exception, Attributes attributes, int totalAttributeCount) { + return ImmutableExceptionEventData.create( + epochNanos, exception, attributes, totalAttributeCount); + } + + /** + * Return the {@link Throwable exception} of the {@link ExceptionEventData}. + * + * @return the {@link Throwable exception} of the {@link ExceptionEventData} + */ + Throwable getException(); +} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/data/ImmutableExceptionEventData.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/data/ImmutableExceptionEventData.java new file mode 100644 index 00000000000..fd2e4bac752 --- /dev/null +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/data/ImmutableExceptionEventData.java @@ -0,0 +1,40 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace.data; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.api.common.Attributes; +import javax.annotation.concurrent.Immutable; + +/** An effectively immutable implementation of {@link ExceptionEventData}. */ +@AutoValue +@Immutable +abstract class ImmutableExceptionEventData implements ExceptionEventData { + + private static final String EXCEPTION_EVENT_NAME = "exception"; + + @Override + public final String getName() { + return EXCEPTION_EVENT_NAME; + } + + /** + * Returns a new immutable {@code Event}. + * + * @param epochNanos epoch timestamp in nanos of the {@code Event}. + * @param exception the {@link Throwable exception} of the {@code Event}. + * @param attributes the additional {@link Attributes} of the {@code Event}. + * @param totalAttributeCount the total number of attributes for this {@code} Event. 
+ * @return a new immutable {@code Event} + */ + static ExceptionEventData create( + long epochNanos, Throwable exception, Attributes attributes, int totalAttributeCount) { + return new AutoValue_ImmutableExceptionEventData( + attributes, epochNanos, totalAttributeCount, exception); + } + + ImmutableExceptionEventData() {} +} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessor.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessor.java index 8163522c16c..cdedb5abb1c 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessor.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessor.java @@ -48,11 +48,12 @@ public final class BatchSpanProcessor implements SpanProcessor { private static final String WORKER_THREAD_NAME = BatchSpanProcessor.class.getSimpleName() + "_WorkerThread"; private static final AttributeKey SPAN_PROCESSOR_TYPE_LABEL = - AttributeKey.stringKey("spanProcessorType"); + AttributeKey.stringKey("processorType"); private static final AttributeKey SPAN_PROCESSOR_DROPPED_LABEL = AttributeKey.booleanKey("dropped"); private static final String SPAN_PROCESSOR_TYPE_VALUE = BatchSpanProcessor.class.getSimpleName(); + private final boolean exportUnsampledSpans; private final Worker worker; private final AtomicBoolean isShutdown = new AtomicBoolean(false); @@ -69,11 +70,13 @@ public static BatchSpanProcessorBuilder builder(SpanExporter spanExporter) { BatchSpanProcessor( SpanExporter spanExporter, + boolean exportUnsampledSpans, MeterProvider meterProvider, long scheduleDelayNanos, int maxQueueSize, int maxExportBatchSize, long exporterTimeoutNanos) { + this.exportUnsampledSpans = exportUnsampledSpans; this.worker = new Worker( spanExporter, @@ -96,10 +99,9 @@ public boolean isStartRequired() { @Override public void onEnd(ReadableSpan span) { - if (span == null || !span.getSpanContext().isSampled()) { - return; + if (span != null && (exportUnsampledSpans || span.getSpanContext().isSampled())) { + worker.addSpan(span); } - worker.addSpan(span); } @Override @@ -120,6 +122,15 @@ public CompletableResultCode forceFlush() { return worker.forceFlush(); } + /** + * Return the processor's configured {@link SpanExporter}. + * + * @since 1.37.0 + */ + public SpanExporter getSpanExporter() { + return worker.spanExporter; + } + // Visible for testing List getBatch() { return worker.batch; @@ -135,6 +146,8 @@ public String toString() { return "BatchSpanProcessor{" + "spanExporter=" + worker.spanExporter + + ", exportUnsampledSpans=" + + exportUnsampledSpans + ", scheduleDelayNanos=" + worker.scheduleDelayNanos + ", maxExportBatchSize=" @@ -160,6 +173,7 @@ private static final class Worker implements Runnable { private long nextExportTime; private final Queue queue; + private final AtomicInteger queueSize = new AtomicInteger(); // When waiting on the spans queue, exporter thread sets this atomic to the number of more // spans it needs before doing an export. Writer threads would then wait for the queue to reach // spansNeeded size before notifying the exporter thread about new entries. 
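Since `ExceptionEventData` now lives in the public `io.opentelemetry.sdk.trace.data` package (replacing the internal variant deleted further down), exporters and tests can recover the original `Throwable` from recorded events. A small consumer sketch; the helper method name is illustrative.

```java
import io.opentelemetry.sdk.trace.data.EventData;
import io.opentelemetry.sdk.trace.data.ExceptionEventData;
import io.opentelemetry.sdk.trace.data.SpanData;

final class ExceptionEventExample {

  // Prints the throwable behind each recorded "exception" event of a span.
  static void printExceptions(SpanData span) {
    for (EventData event : span.getEvents()) {
      if (event instanceof ExceptionEventData) {
        Throwable exception = ((ExceptionEventData) event).getException();
        System.out.println(event.getName() + ": " + exception);
      }
    }
  }
}
```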
@@ -189,7 +203,7 @@ private Worker( meter .gaugeBuilder("queueSize") .ofLongs() - .setDescription("The number of spans queued") + .setDescription("The number of items queued") .setUnit("1") .buildWithCallback( result -> @@ -224,7 +238,7 @@ private void addSpan(ReadableSpan span) { if (!queue.offer(span)) { processedSpansCounter.add(1, droppedAttrs); } else { - if (queue.size() >= spansNeeded.get()) { + if (queueSize.incrementAndGet() >= spansNeeded.get()) { signal.offer(true); } } @@ -238,8 +252,7 @@ public void run() { if (flushRequested.get() != null) { flush(); } - JcTools.drain( - queue, maxExportBatchSize - batch.size(), span -> batch.add(span.toSpanData())); + drain(maxExportBatchSize - batch.size()); if (batch.size() >= maxExportBatchSize || System.nanoTime() >= nextExportTime) { exportCurrentBatch(); @@ -261,13 +274,17 @@ public void run() { } } + private int drain(int limit) { + int drained = JcTools.drain(queue, limit, span -> batch.add(span.toSpanData())); + queueSize.addAndGet(-drained); + return drained; + } + private void flush() { - int spansToFlush = queue.size(); + int spansToFlush = queueSize.get(); while (spansToFlush > 0) { - ReadableSpan span = queue.poll(); - assert span != null; - batch.add(span.toSpanData()); - spansToFlush--; + int drained = drain(maxExportBatchSize - batch.size()); + spansToFlush -= drained; if (batch.size() >= maxExportBatchSize) { exportCurrentBatch(); } diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessorBuilder.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessorBuilder.java index d801c2296bc..d89083ded06 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessorBuilder.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessorBuilder.java @@ -11,9 +11,12 @@ import io.opentelemetry.api.metrics.MeterProvider; import java.time.Duration; import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; /** Builder class for {@link BatchSpanProcessor}. */ public final class BatchSpanProcessorBuilder { + private static final Logger logger = Logger.getLogger(BatchSpanProcessorBuilder.class.getName()); // Visible for testing static final long DEFAULT_SCHEDULE_DELAY_MILLIS = 5000; @@ -25,6 +28,7 @@ public final class BatchSpanProcessorBuilder { static final int DEFAULT_EXPORT_TIMEOUT_MILLIS = 30_000; private final SpanExporter spanExporter; + private boolean exportUnsampledSpans = false; private long scheduleDelayNanos = TimeUnit.MILLISECONDS.toNanos(DEFAULT_SCHEDULE_DELAY_MILLIS); private int maxQueueSize = DEFAULT_MAX_QUEUE_SIZE; private int maxExportBatchSize = DEFAULT_MAX_EXPORT_BATCH_SIZE; @@ -35,6 +39,17 @@ public final class BatchSpanProcessorBuilder { this.spanExporter = requireNonNull(spanExporter, "spanExporter"); } + /** + * Sets whether unsampled spans should be exported. If unset, defaults to exporting only sampled + * spans. + * + * @since 1.34.0 + */ + public BatchSpanProcessorBuilder setExportUnsampledSpans(boolean exportUnsampledSpans) { + this.exportUnsampledSpans = exportUnsampledSpans; + return this; + } + /** * Sets the delay interval between two consecutive exports. If unset, defaults to {@value * DEFAULT_SCHEDULE_DELAY_MILLIS}ms. 
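A configuration sketch for the batch processor options touched in this file; `setExportUnsampledSpans` is the new switch, and the hunks that follow additionally validate that `maxQueueSize` is positive, cap `maxExportBatchSize` at `maxQueueSize`, and treat a zero exporter timeout as "no timeout". The exporter instance is assumed to exist.

```java
import io.opentelemetry.sdk.trace.export.BatchSpanProcessor;
import io.opentelemetry.sdk.trace.export.SpanExporter;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

final class BatchProcessorExample {

  static BatchSpanProcessor create(SpanExporter exporter) {
    return BatchSpanProcessor.builder(exporter)
        .setExportUnsampledSpans(true) // new: also forward unsampled spans to the exporter
        .setScheduleDelay(Duration.ofSeconds(5))
        .setMaxQueueSize(2048) // must be positive
        .setMaxExportBatchSize(512) // capped to maxQueueSize if larger, with a warning
        .setExporterTimeout(0, TimeUnit.SECONDS) // zero now disables the export timeout
        .build();
  }
}
```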
@@ -67,7 +82,7 @@ long getScheduleDelayNanos() { public BatchSpanProcessorBuilder setExporterTimeout(long timeout, TimeUnit unit) { requireNonNull(unit, "unit"); checkArgument(timeout >= 0, "timeout must be non-negative"); - exporterTimeoutNanos = unit.toNanos(timeout); + exporterTimeoutNanos = timeout == 0 ? Long.MAX_VALUE : unit.toNanos(timeout); return this; } @@ -94,9 +109,11 @@ long getExporterTimeoutNanos() { * @param maxQueueSize the maximum number of Spans that are kept in the queue before start * dropping. * @return this. + * @throws IllegalArgumentException if {@code maxQueueSize} is not positive. * @see BatchSpanProcessorBuilder#DEFAULT_MAX_QUEUE_SIZE */ public BatchSpanProcessorBuilder setMaxQueueSize(int maxQueueSize) { + checkArgument(maxQueueSize > 0, "maxQueueSize must be positive."); this.maxQueueSize = maxQueueSize; return this; } @@ -144,8 +161,16 @@ int getMaxExportBatchSize() { * @return a new {@link BatchSpanProcessor}. */ public BatchSpanProcessor build() { + if (maxExportBatchSize > maxQueueSize) { + logger.log( + Level.WARNING, + "maxExportBatchSize should not exceed maxQueueSize. Setting maxExportBatchSize to {0} instead of {1}", + new Object[] {maxQueueSize, maxExportBatchSize}); + maxExportBatchSize = maxQueueSize; + } return new BatchSpanProcessor( spanExporter, + exportUnsampledSpans, meterProvider, scheduleDelayNanos, maxQueueSize, diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessor.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessor.java index ba05a5f0e94..f543e25353e 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessor.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessor.java @@ -36,11 +36,13 @@ public final class SimpleSpanProcessor implements SpanProcessor { private static final Logger logger = Logger.getLogger(SimpleSpanProcessor.class.getName()); private final SpanExporter spanExporter; - private final boolean sampled; + private final boolean exportUnsampledSpans; private final Set pendingExports = Collections.newSetFromMap(new ConcurrentHashMap<>()); private final AtomicBoolean isShutdown = new AtomicBoolean(false); + private final Object exporterLock = new Object(); + /** * Returns a new {@link SimpleSpanProcessor} which exports spans to the {@link SpanExporter} * synchronously. @@ -53,12 +55,22 @@ public final class SimpleSpanProcessor implements SpanProcessor { */ public static SpanProcessor create(SpanExporter exporter) { requireNonNull(exporter, "exporter"); - return new SimpleSpanProcessor(exporter, /* sampled= */ true); + return builder(exporter).build(); + } + + /** + * Returns a new Builder for {@link SimpleSpanProcessor}. 
+ * + * @since 1.34.0 + */ + public static SimpleSpanProcessorBuilder builder(SpanExporter exporter) { + requireNonNull(exporter, "exporter"); + return new SimpleSpanProcessorBuilder(exporter); } - SimpleSpanProcessor(SpanExporter spanExporter, boolean sampled) { + SimpleSpanProcessor(SpanExporter spanExporter, boolean exportUnsampledSpans) { this.spanExporter = requireNonNull(spanExporter, "spanExporter"); - this.sampled = sampled; + this.exportUnsampledSpans = exportUnsampledSpans; } @Override @@ -73,22 +85,26 @@ public boolean isStartRequired() { @Override public void onEnd(ReadableSpan span) { - if (sampled && !span.getSpanContext().isSampled()) { - return; - } - try { - List spans = Collections.singletonList(span.toSpanData()); - CompletableResultCode result = spanExporter.export(spans); - pendingExports.add(result); - result.whenComplete( - () -> { - pendingExports.remove(result); - if (!result.isSuccess()) { - logger.log(Level.FINE, "Exporter failed"); - } - }); - } catch (RuntimeException e) { - logger.log(Level.WARNING, "Exporter threw an Exception", e); + if (span != null && (exportUnsampledSpans || span.getSpanContext().isSampled())) { + try { + List spans = Collections.singletonList(span.toSpanData()); + CompletableResultCode result; + + synchronized (exporterLock) { + result = spanExporter.export(spans); + } + + pendingExports.add(result); + result.whenComplete( + () -> { + pendingExports.remove(result); + if (!result.isSuccess()) { + logger.log(Level.FINE, "Exporter failed"); + } + }); + } catch (RuntimeException e) { + logger.log(Level.WARNING, "Exporter threw an Exception", e); + } } } @@ -126,8 +142,22 @@ public CompletableResultCode forceFlush() { return CompletableResultCode.ofAll(pendingExports); } + /** + * Return the processor's configured {@link SpanExporter}. + * + * @since 1.37.0 + */ + public SpanExporter getSpanExporter() { + return spanExporter; + } + @Override public String toString() { - return "SimpleSpanProcessor{" + "spanExporter=" + spanExporter + '}'; + return "SimpleSpanProcessor{" + + "spanExporter=" + + spanExporter + + ", exportUnsampledSpans=" + + exportUnsampledSpans + + '}'; } } diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessorBuilder.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessorBuilder.java new file mode 100644 index 00000000000..de9f3f9152a --- /dev/null +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessorBuilder.java @@ -0,0 +1,40 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace.export; + +import static java.util.Objects.requireNonNull; + +/** + * Builder class for {@link SimpleSpanProcessor}. + * + * @since 1.34.0 + */ +public final class SimpleSpanProcessorBuilder { + private final SpanExporter spanExporter; + private boolean exportUnsampledSpans = false; + + SimpleSpanProcessorBuilder(SpanExporter spanExporter) { + this.spanExporter = requireNonNull(spanExporter, "spanExporter"); + } + + /** + * Sets whether unsampled spans should be exported. If unset, defaults to exporting only sampled + * spans. + */ + public SimpleSpanProcessorBuilder setExportUnsampledSpans(boolean exportUnsampledSpans) { + this.exportUnsampledSpans = exportUnsampledSpans; + return this; + } + + /** + * Returns a new {@link SimpleSpanProcessor} with the configuration of this builder. + * + * @return a new {@link SimpleSpanProcessor}. 
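A one-line counterpart for the simple processor, using the builder added in this file; the helper class is illustrative.

```java
import io.opentelemetry.sdk.trace.SpanProcessor;
import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor;
import io.opentelemetry.sdk.trace.export.SpanExporter;

final class SimpleProcessorExample {

  // Synchronously exports every finished span, sampled or not.
  static SpanProcessor create(SpanExporter exporter) {
    return SimpleSpanProcessor.builder(exporter).setExportUnsampledSpans(true).build();
  }
}
```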
+ */ + public SimpleSpanProcessor build() { + return new SimpleSpanProcessor(spanExporter, exportUnsampledSpans); + } +} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/ExtendedSpanProcessor.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/ExtendedSpanProcessor.java new file mode 100644 index 00000000000..5c608bf8275 --- /dev/null +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/ExtendedSpanProcessor.java @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace.internal; + +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.trace.SpanProcessor; + +/** + * Extended {@link SpanProcessor} with experimental APIs. + * + *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change + * at any time. + */ +public interface ExtendedSpanProcessor extends SpanProcessor { + + /** + * Called when a {@link io.opentelemetry.api.trace.Span} is ended, but before {@link + * SpanProcessor#onEnd(ReadableSpan)} is invoked with an immutable variant of this span. This + * means that the span will still be mutable. Note that the span will only be modifiable + * synchronously from this callback, concurrent modifications from other threads will be + * prevented. Only called if {@link Span#isRecording()} returns true. + * + *
<p>
    This method is called synchronously on the execution thread, should not throw or block the + * execution thread. + * + * @param span the {@code Span} that is just about to be ended. + */ + void onEnding(ReadWriteSpan span); + + /** + * Returns {@code true} if this {@link SpanProcessor} requires onEnding events. + * + * @return {@code true} if this {@link SpanProcessor} requires onEnding events. + */ + boolean isOnEndingRequired(); +} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/SdkTracerProviderUtil.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/SdkTracerProviderUtil.java new file mode 100644 index 00000000000..753a312fa1f --- /dev/null +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/SdkTracerProviderUtil.java @@ -0,0 +1,75 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace.internal; + +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.function.Predicate; + +/** + * A collection of methods that allow use of experimental features prior to availability in public + * APIs. + * + *
<p>
    This class is internal and experimental. Its APIs are unstable and can change at any time. Its + * APIs (or a version of them) may be promoted to the public stable API in the future, but no + * guarantees are made. + */ +public final class SdkTracerProviderUtil { + + private SdkTracerProviderUtil() {} + + /** Reflectively set the {@link ScopeConfigurator} to the {@link SdkTracerProvider}. */ + public static void setTracerConfigurator( + SdkTracerProvider sdkTracerProvider, ScopeConfigurator scopeConfigurator) { + try { + Method method = + SdkTracerProvider.class.getDeclaredMethod( + "setTracerConfigurator", ScopeConfigurator.class); + method.setAccessible(true); + method.invoke(sdkTracerProvider, scopeConfigurator); + } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + throw new IllegalStateException( + "Error calling setTracerConfigurator on SdkTracerProvider", e); + } + } + + /** Reflectively set the {@link ScopeConfigurator} to the {@link SdkTracerProviderBuilder}. */ + public static void setTracerConfigurator( + SdkTracerProviderBuilder sdkTracerProviderBuilder, + ScopeConfigurator tracerConfigurator) { + try { + Method method = + SdkTracerProviderBuilder.class.getDeclaredMethod( + "setTracerConfigurator", ScopeConfigurator.class); + method.setAccessible(true); + method.invoke(sdkTracerProviderBuilder, tracerConfigurator); + } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + throw new IllegalStateException( + "Error calling setTracerConfigurator on SdkTracerProviderBuilder", e); + } + } + + /** Reflectively add a tracer configurator condition to the {@link SdkTracerProviderBuilder}. */ + public static void addTracerConfiguratorCondition( + SdkTracerProviderBuilder sdkTracerProviderBuilder, + Predicate scopeMatcher, + TracerConfig tracerConfig) { + try { + Method method = + SdkTracerProviderBuilder.class.getDeclaredMethod( + "addTracerConfiguratorCondition", Predicate.class, TracerConfig.class); + method.setAccessible(true); + method.invoke(sdkTracerProviderBuilder, scopeMatcher, tracerConfig); + } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + throw new IllegalStateException( + "Error calling addTracerConfiguratorCondition on SdkTracerProviderBuilder", e); + } + } +} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/TracerConfig.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/TracerConfig.java new file mode 100644 index 00000000000..4fa8c702b1c --- /dev/null +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/TracerConfig.java @@ -0,0 +1,67 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace.internal; + +import com.google.auto.value.AutoValue; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder; +import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder; +import java.util.function.Predicate; +import javax.annotation.concurrent.Immutable; + +/** + * A collection of configuration options which define the behavior of a {@link Tracer}. + * + *
<p>
    This class is internal and experimental. Its APIs are unstable and can change at any time. Its + * APIs (or a version of them) may be promoted to the public stable API in the future, but no + * guarantees are made. + * + * @see SdkTracerProviderUtil#setTracerConfigurator(SdkTracerProviderBuilder, ScopeConfigurator) + * @see SdkTracerProviderUtil#addTracerConfiguratorCondition(SdkTracerProviderBuilder, Predicate, + * TracerConfig) + */ +@AutoValue +@Immutable +public abstract class TracerConfig { + + private static final TracerConfig DEFAULT_CONFIG = + new AutoValue_TracerConfig(/* enabled= */ true); + private static final TracerConfig DISABLED_CONFIG = + new AutoValue_TracerConfig(/* enabled= */ false); + + /** Returns a disabled {@link TracerConfig}. */ + public static TracerConfig disabled() { + return DISABLED_CONFIG; + } + + /** Returns an enabled {@link TracerConfig}. */ + public static TracerConfig enabled() { + return DEFAULT_CONFIG; + } + + /** + * Returns the default {@link TracerConfig}, which is used when no configurator is set or when the + * tracer configurator returns {@code null} for a {@link InstrumentationScopeInfo}. + */ + public static TracerConfig defaultConfig() { + return DEFAULT_CONFIG; + } + + /** + * Create a {@link ScopeConfiguratorBuilder} for configuring {@link + * SdkTracerProviderUtil#setTracerConfigurator(SdkTracerProviderBuilder, ScopeConfigurator)}. + */ + public static ScopeConfiguratorBuilder configuratorBuilder() { + return ScopeConfigurator.builder(); + } + + TracerConfig() {} + + /** Returns {@code true} if this tracer is enabled. Defaults to {@code true}. */ + public abstract boolean isEnabled(); +} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/data/ExceptionEventData.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/data/ExceptionEventData.java deleted file mode 100644 index 7e41c5a6b66..00000000000 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/data/ExceptionEventData.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.trace.internal.data; - -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.sdk.trace.SpanLimits; -import io.opentelemetry.sdk.trace.data.EventData; - -/** - * Data representation of an event for a recorded exception. - * - *
<p>
    This class is internal and is hence not for public use. Its APIs are unstable and can change - * at any time. - */ -public interface ExceptionEventData extends EventData { - - /** - * Returns a new immutable {@link ExceptionEventData}. - * - * @param spanLimits limits applied to {@link ExceptionEventData}. - * @param epochNanos epoch timestamp in nanos of the {@link ExceptionEventData}. - * @param exception the {@link Throwable exception} of the {@code Event}. - * @param additionalAttributes the additional attributes of the {@link ExceptionEventData}. - * @return a new immutable {@link ExceptionEventData} - */ - static ExceptionEventData create( - SpanLimits spanLimits, - long epochNanos, - Throwable exception, - Attributes additionalAttributes) { - return ImmutableExceptionEventData.create( - spanLimits, epochNanos, exception, additionalAttributes); - } - - /** - * Return the {@link Throwable exception} of the {@link ExceptionEventData}. - * - * @return the {@link Throwable exception} of the {@link ExceptionEventData} - */ - Throwable getException(); - - /** - * Return the additional {@link Attributes attributes} of the {@link ExceptionEventData}. - * - * @return the additional {@link Attributes attributes} of the {@link ExceptionEventData} - */ - Attributes getAdditionalAttributes(); -} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/data/ImmutableExceptionEventData.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/data/ImmutableExceptionEventData.java deleted file mode 100644 index ee13f930775..00000000000 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/data/ImmutableExceptionEventData.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.trace.internal.data; - -import com.google.auto.value.AutoValue; -import com.google.auto.value.extension.memoized.Memoized; -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.sdk.internal.AttributeUtil; -import io.opentelemetry.sdk.trace.SpanLimits; -import java.io.PrintWriter; -import java.io.StringWriter; -import javax.annotation.concurrent.Immutable; - -/** An effectively immutable implementation of {@link ExceptionEventData}. */ -@AutoValue -@Immutable -abstract class ImmutableExceptionEventData implements ExceptionEventData { - - private static final AttributeKey EXCEPTION_TYPE = - AttributeKey.stringKey("exception.type"); - private static final AttributeKey EXCEPTION_MESSAGE = - AttributeKey.stringKey("exception.message"); - private static final AttributeKey EXCEPTION_STACKTRACE = - AttributeKey.stringKey("exception.stacktrace"); - private static final String EXCEPTION_EVENT_NAME = "exception"; - - /** - * Returns a new immutable {@code Event}. - * - * @param spanLimits limits applied to {@code Event}. - * @param epochNanos epoch timestamp in nanos of the {@code Event}. - * @param exception the {@link Throwable exception} of the {@code Event}. - * @param additionalAttributes the additional {@link Attributes} of the {@code Event}. 
- * @return a new immutable {@code Event} - */ - static ExceptionEventData create( - SpanLimits spanLimits, - long epochNanos, - Throwable exception, - Attributes additionalAttributes) { - - return new AutoValue_ImmutableExceptionEventData( - epochNanos, exception, additionalAttributes, spanLimits); - } - - ImmutableExceptionEventData() {} - - protected abstract SpanLimits getSpanLimits(); - - @Override - public final String getName() { - return EXCEPTION_EVENT_NAME; - } - - @Override - @Memoized - public Attributes getAttributes() { - Throwable exception = getException(); - Attributes additionalAttributes = getAdditionalAttributes(); - AttributesBuilder attributesBuilder = Attributes.builder(); - - attributesBuilder.put(EXCEPTION_TYPE, exception.getClass().getCanonicalName()); - String message = exception.getMessage(); - if (message != null) { - attributesBuilder.put(EXCEPTION_MESSAGE, message); - } - - StringWriter stringWriter = new StringWriter(); - try (PrintWriter printWriter = new PrintWriter(stringWriter)) { - exception.printStackTrace(printWriter); - } - attributesBuilder.put(EXCEPTION_STACKTRACE, stringWriter.toString()); - attributesBuilder.putAll(additionalAttributes); - - SpanLimits spanLimits = getSpanLimits(); - return AttributeUtil.applyAttributesLimit( - attributesBuilder.build(), - spanLimits.getMaxNumberOfAttributesPerEvent(), - spanLimits.getMaxAttributeValueLength()); - } - - @Override - public final int getTotalAttributeCount() { - return getAttributes().size(); - } -} diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/data/package-info.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/data/package-info.java deleted file mode 100644 index bab18f74550..00000000000 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/internal/data/package-info.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -/** - * Interfaces and implementations that are internal to OpenTelemetry. - * - *
<p>
    All the content under this package and its subpackages are considered not part of the public - * API, and must not be used by users of the OpenTelemetry library. - */ -@ParametersAreNonnullByDefault -package io.opentelemetry.sdk.trace.internal.data; - -import javax.annotation.ParametersAreNonnullByDefault; diff --git a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/samplers/TraceIdRatioBasedSampler.java b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/samplers/TraceIdRatioBasedSampler.java index 59a63856318..9a7490931af 100644 --- a/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/samplers/TraceIdRatioBasedSampler.java +++ b/sdk/trace/src/main/java/io/opentelemetry/sdk/trace/samplers/TraceIdRatioBasedSampler.java @@ -13,6 +13,7 @@ import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.util.List; +import java.util.Locale; import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; @@ -112,7 +113,7 @@ private static long getTraceIdRandomPart(String traceId) { } private static String decimalFormat(double value) { - DecimalFormatSymbols decimalFormatSymbols = DecimalFormatSymbols.getInstance(); + DecimalFormatSymbols decimalFormatSymbols = DecimalFormatSymbols.getInstance(Locale.ROOT); decimalFormatSymbols.setDecimalSeparator('.'); DecimalFormat decimalFormat = new DecimalFormat("0.000000", decimalFormatSymbols); diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/MultiSpanProcessorTest.java b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/MultiSpanProcessorTest.java index c51fe61c954..34ac6e1c03d 100644 --- a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/MultiSpanProcessorTest.java +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/MultiSpanProcessorTest.java @@ -14,6 +14,7 @@ import io.opentelemetry.context.Context; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.internal.ExtendedSpanProcessor; import java.util.Arrays; import java.util.Collections; import org.junit.jupiter.api.BeforeEach; @@ -27,18 +28,20 @@ @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) class MultiSpanProcessorTest { - @Mock private SpanProcessor spanProcessor1; - @Mock private SpanProcessor spanProcessor2; + @Mock private ExtendedSpanProcessor spanProcessor1; + @Mock private ExtendedSpanProcessor spanProcessor2; @Mock private ReadableSpan readableSpan; @Mock private ReadWriteSpan readWriteSpan; @BeforeEach void setUp() { when(spanProcessor1.isStartRequired()).thenReturn(true); + when(spanProcessor1.isOnEndingRequired()).thenReturn(true); when(spanProcessor1.isEndRequired()).thenReturn(true); when(spanProcessor1.forceFlush()).thenReturn(CompletableResultCode.ofSuccess()); when(spanProcessor1.shutdown()).thenReturn(CompletableResultCode.ofSuccess()); when(spanProcessor2.isStartRequired()).thenReturn(true); + when(spanProcessor2.isOnEndingRequired()).thenReturn(true); when(spanProcessor2.isEndRequired()).thenReturn(true); when(spanProcessor2.forceFlush()).thenReturn(CompletableResultCode.ofSuccess()); when(spanProcessor2.shutdown()).thenReturn(CompletableResultCode.ofSuccess()); @@ -61,12 +64,17 @@ void oneSpanProcessor() { @Test void twoSpanProcessor() { - SpanProcessor multiSpanProcessor = - SpanProcessor.composite(Arrays.asList(spanProcessor1, spanProcessor2)); + ExtendedSpanProcessor multiSpanProcessor = + (ExtendedSpanProcessor) + SpanProcessor.composite(Arrays.asList(spanProcessor1, spanProcessor2)); multiSpanProcessor.onStart(Context.root(), 
readWriteSpan); verify(spanProcessor1).onStart(same(Context.root()), same(readWriteSpan)); verify(spanProcessor2).onStart(same(Context.root()), same(readWriteSpan)); + multiSpanProcessor.onEnding(readWriteSpan); + verify(spanProcessor1).onEnding(same(readWriteSpan)); + verify(spanProcessor2).onEnding(same(readWriteSpan)); + multiSpanProcessor.onEnd(readableSpan); verify(spanProcessor1).onEnd(same(readableSpan)); verify(spanProcessor2).onEnd(same(readableSpan)); @@ -83,9 +91,11 @@ void twoSpanProcessor() { @Test void twoSpanProcessor_DifferentRequirements() { when(spanProcessor1.isEndRequired()).thenReturn(false); + when(spanProcessor2.isOnEndingRequired()).thenReturn(false); when(spanProcessor2.isStartRequired()).thenReturn(false); - SpanProcessor multiSpanProcessor = - SpanProcessor.composite(Arrays.asList(spanProcessor1, spanProcessor2)); + ExtendedSpanProcessor multiSpanProcessor = + (ExtendedSpanProcessor) + SpanProcessor.composite(Arrays.asList(spanProcessor1, spanProcessor2)); assertThat(multiSpanProcessor.isStartRequired()).isTrue(); assertThat(multiSpanProcessor.isEndRequired()).isTrue(); @@ -94,6 +104,10 @@ void twoSpanProcessor_DifferentRequirements() { verify(spanProcessor1).onStart(same(Context.root()), same(readWriteSpan)); verify(spanProcessor2, times(0)).onStart(any(Context.class), any(ReadWriteSpan.class)); + multiSpanProcessor.onEnding(readWriteSpan); + verify(spanProcessor1).onEnding(same(readWriteSpan)); + verify(spanProcessor2, times(0)).onEnding(any(ReadWriteSpan.class)); + multiSpanProcessor.onEnd(readableSpan); verify(spanProcessor1, times(0)).onEnd(any(ReadableSpan.class)); verify(spanProcessor2).onEnd(same(readableSpan)); @@ -117,6 +131,7 @@ void stringRepresentation() { .hasToString( "MultiSpanProcessor{" + "spanProcessorsStart=[spanProcessor1, spanProcessor1], " + + "spanProcessorsEnding=[spanProcessor1, spanProcessor1], " + "spanProcessorsEnd=[spanProcessor1, spanProcessor1], " + "spanProcessorsAll=[spanProcessor1, spanProcessor1]}"); } diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkSpanBuilderTest.java b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkSpanBuilderTest.java index 461ed774cca..9c9ecdc7e9c 100644 --- a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkSpanBuilderTest.java +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkSpanBuilderTest.java @@ -246,19 +246,21 @@ void setAttribute() { .setAttribute("long", 12345L) .setAttribute("double", .12345) .setAttribute("boolean", true) - .setAttribute(stringKey("stringAttribute"), "attrvalue"); + .setAttribute(stringKey("stringAttribute"), "attrvalue") + .setAttribute(longKey("longAttribute"), 123); SdkSpan span = (SdkSpan) spanBuilder.startSpan(); try { SpanData spanData = span.toSpanData(); Attributes attrs = spanData.getAttributes(); - assertThat(attrs.size()).isEqualTo(5); + assertThat(attrs.size()).isEqualTo(6); assertThat(attrs.get(stringKey("string"))).isEqualTo("value"); assertThat(attrs.get(longKey("long"))).isEqualTo(12345L); assertThat(attrs.get(doubleKey("double"))).isEqualTo(0.12345); assertThat(attrs.get(booleanKey("boolean"))).isEqualTo(true); assertThat(attrs.get(stringKey("stringAttribute"))).isEqualTo("attrvalue"); - assertThat(spanData.getTotalAttributeCount()).isEqualTo(5); + assertThat(attrs.get(longKey("longAttribute"))).isEqualTo(123); + assertThat(spanData.getTotalAttributeCount()).isEqualTo(6); } finally { span.end(); } diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkSpanTest.java 
b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkSpanTest.java index a87c58d7c3c..fb387cf2a18 100644 --- a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkSpanTest.java +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkSpanTest.java @@ -15,7 +15,14 @@ import static io.opentelemetry.api.common.AttributeKey.stringKey; import static java.util.stream.Collectors.joining; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; @@ -34,14 +41,16 @@ import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.time.TestClock; import io.opentelemetry.sdk.trace.data.EventData; +import io.opentelemetry.sdk.trace.data.ExceptionEventData; import io.opentelemetry.sdk.trace.data.LinkData; import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.data.StatusData; -import io.opentelemetry.sdk.trace.internal.data.ExceptionEventData; +import io.opentelemetry.sdk.trace.internal.ExtendedSpanProcessor; import java.io.PrintWriter; import java.io.StringWriter; import java.time.Duration; import java.time.Instant; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -52,17 +61,27 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Consumer; import java.util.stream.IntStream; +import java.util.stream.Stream; import javax.annotation.Nullable; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; @SuppressWarnings({"rawtypes", "unchecked"}) @ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) class SdkSpanTest { private static final String SPAN_NAME = "MySpanName"; private static final String SPAN_NEW_NAME = "NewName"; @@ -81,7 +100,7 @@ class SdkSpanTest { private final Map attributes = new HashMap<>(); private Attributes expectedAttributes; private final LinkData link = LinkData.create(spanContext); - @Mock private SpanProcessor spanProcessor; + @Mock private ExtendedSpanProcessor spanProcessor; private TestClock testClock; @@ -97,6 +116,9 @@ void setUp() { } expectedAttributes = builder.build(); testClock = TestClock.create(Instant.ofEpochSecond(0, START_EPOCH_NANOS)); + when(spanProcessor.isStartRequired()).thenReturn(true); + when(spanProcessor.isOnEndingRequired()).thenReturn(true); + when(spanProcessor.isEndRequired()).thenReturn(true); } @Test @@ -129,6 +151,92 @@ void endSpanTwice_DoNotCrash() { assertThat(span.hasEnded()).isTrue(); } + @Test + void 
onEnding_spanStillMutable() { + SdkSpan span = createTestSpan(SpanKind.INTERNAL); + + AttributeKey dummyAttrib = AttributeKey.stringKey("processor_foo"); + + AtomicBoolean endedStateInProcessor = new AtomicBoolean(); + doAnswer( + invocation -> { + ReadWriteSpan sp = invocation.getArgument(0, ReadWriteSpan.class); + assertThat(sp.hasEnded()).isFalse(); + sp.end(); // should have no effect, nested end should be detected + endedStateInProcessor.set(sp.hasEnded()); + sp.setAttribute(dummyAttrib, "bar"); + return null; + }) + .when(spanProcessor) + .onEnding(any()); + + span.end(); + verify(spanProcessor).onEnding(same(span)); + assertThat(span.hasEnded()).isTrue(); + assertThat(endedStateInProcessor.get()).isFalse(); + assertThat(span.getAttribute(dummyAttrib)).isEqualTo("bar"); + } + + @Test + void onEnding_concurrentModificationsPrevented() { + SdkSpan span = createTestSpan(SpanKind.INTERNAL); + + AttributeKey syncAttrib = AttributeKey.stringKey("sync_foo"); + AttributeKey concurrentAttrib = AttributeKey.stringKey("concurrent_foo"); + + doAnswer( + invocation -> { + ReadWriteSpan sp = invocation.getArgument(0, ReadWriteSpan.class); + + Thread concurrent = + new Thread( + () -> { + sp.setAttribute(concurrentAttrib, "concurrent_bar"); + }); + concurrent.start(); + concurrent.join(); + + sp.setAttribute(syncAttrib, "sync_bar"); + + return null; + }) + .when(spanProcessor) + .onEnding(any()); + + span.end(); + verify(spanProcessor).onEnding(same(span)); + assertThat(span.getAttribute(concurrentAttrib)).isNull(); + assertThat(span.getAttribute(syncAttrib)).isEqualTo("sync_bar"); + } + + @Test + void onEnding_latencyPinned() { + SdkSpan span = createTestSpan(SpanKind.INTERNAL); + + AtomicLong spanLatencyInProcessor = new AtomicLong(); + doAnswer( + invocation -> { + ReadWriteSpan sp = invocation.getArgument(0, ReadWriteSpan.class); + + testClock.advance(Duration.ofSeconds(100)); + spanLatencyInProcessor.set(sp.getLatencyNanos()); + return null; + }) + .when(spanProcessor) + .onEnding(any()); + + testClock.advance(Duration.ofSeconds(1)); + long expectedDuration = testClock.now() - START_EPOCH_NANOS; + + assertThat(span.getLatencyNanos()).isEqualTo(expectedDuration); + + span.end(); + verify(spanProcessor).onEnding(same(span)); + assertThat(span.hasEnded()).isTrue(); + assertThat(span.getLatencyNanos()).isEqualTo(expectedDuration); + assertThat(spanLatencyInProcessor.get()).isEqualTo(expectedDuration); + } + @Test void toSpanData_ActiveSpan() { SdkSpan span = createTestSpan(SpanKind.INTERNAL); @@ -329,6 +437,32 @@ void getAttribute() { } } + @Test + void getAttributes() { + SdkSpan span = createTestSpanWithAttributes(attributes); + try { + assertThat(span.getAttributes()) + .isEqualTo( + Attributes.builder() + .put("MyBooleanAttributeKey", false) + .put("MyStringAttributeKey", "MyStringAttributeValue") + .put("MyLongAttributeKey", 123L) + .build()); + } finally { + span.end(); + } + } + + @Test + void getAttributes_Empty() { + SdkSpan span = createTestSpan(SpanKind.INTERNAL); + try { + assertThat(span.getAttributes()).isEqualTo(Attributes.empty()); + } finally { + span.end(); + } + } + @Test @SuppressWarnings("deprecation") // Testing deprecated code void getInstrumentationLibraryInfo() { @@ -749,6 +883,7 @@ void attributeLength() { assertThat(event.getAttributes().get(stringKey("exception.message"))).isEqualTo(strVal); assertThat(event.getAttributes().get(stringKey("exception.stacktrace")).length()) .isLessThanOrEqualTo(maxLength); + assertThat(event.getAttributes().size()).isEqualTo(3); } finally 
{ span.end(); } @@ -792,6 +927,129 @@ void eventAttributeLength() { } } + @Test + void addLink() { + int maxLinks = 3; + int maxNumberOfAttributes = 4; + int maxAttributeLength = 5; + SdkSpan span = + createTestSpan( + SpanKind.INTERNAL, + SpanLimits.builder() + .setMaxNumberOfLinks(maxLinks) + .setMaxNumberOfAttributesPerLink(maxNumberOfAttributes) + .setMaxAttributeValueLength(maxAttributeLength) + .build(), + parentSpanId, + null, + null); + try { + Span span1 = createTestSpan(SpanKind.INTERNAL); + Span span2 = createTestSpan(SpanKind.INTERNAL); + Span span3 = createTestSpan(SpanKind.INTERNAL); + Span span4 = createTestSpan(SpanKind.INTERNAL); + + span.addLink(span1.getSpanContext()); + + Attributes span2LinkAttributes = + Attributes.builder() + .put("key1", true) + .put("key2", true) + .put("key3", true) + .put( + "key4", + IntStream.range(0, maxAttributeLength + 1).mapToObj(i -> "a").collect(joining())) + .build(); + span.addLink(span2.getSpanContext(), span2LinkAttributes); + + Attributes span3LinkAttributes = + Attributes.builder() + .put("key1", true) + .put("key2", true) + .put("key3", true) + .put("key4", true) + .put("key5", true) + .build(); + span.addLink(span3.getSpanContext(), span3LinkAttributes); + + span.addLink(span4.getSpanContext()); + + SpanData spanData = span.toSpanData(); + // 1 link added during span construction via createTestSpan, 4 links added after span start + assertThat(spanData.getTotalRecordedLinks()).isEqualTo(4); + assertThat(spanData.getLinks()) + .satisfiesExactly( + link -> { + assertThat(link.getSpanContext()).isEqualTo(span1.getSpanContext()); + assertThat(link.getAttributes()).isEqualTo(Attributes.empty()); + }, + link -> { + assertThat(link.getSpanContext()).isEqualTo(span2.getSpanContext()); + assertThat(link.getAttributes()) + .isEqualTo( + Attributes.builder() + .put("key1", true) + .put("key2", true) + .put("key3", true) + // Should be truncated to max attribute length + .put( + "key4", + IntStream.range(0, maxAttributeLength) + .mapToObj(i -> "a") + .collect(joining())) + .build()); + }, + link -> { + assertThat(link.getSpanContext()).isEqualTo(span2.getSpanContext()); + // The 5th attribute key should be omitted due to attribute limits. Can't predict + // which of the 5 is dropped. 
+ assertThat(link.getAttributes().size()).isEqualTo(4); + }); + } finally { + span.end(); + } + } + + @Test + void addLink_InvalidArgs() { + SdkSpan span = createTestSpan(SpanKind.INTERNAL); + assertThatCode(() -> span.addLink(null)).doesNotThrowAnyException(); + assertThatCode(() -> span.addLink(SpanContext.getInvalid())).doesNotThrowAnyException(); + assertThatCode(() -> span.addLink(null, null)).doesNotThrowAnyException(); + assertThatCode(() -> span.addLink(SpanContext.getInvalid(), Attributes.empty())) + .doesNotThrowAnyException(); + } + + @Test + void addLink_FaultIn() { + SdkSpan span = + SdkSpan.startSpan( + spanContext, + SPAN_NAME, + instrumentationScopeInfo, + SpanKind.INTERNAL, + Span.getInvalid(), + Context.root(), + SpanLimits.getDefault(), + spanProcessor, + testClock, + resource, + null, + null, // exercises the fault-in path + 0, + 0); + SdkSpan linkedSpan = createTestSpan(SpanKind.INTERNAL); + span.addLink(linkedSpan.getSpanContext()); + + SpanData spanData = span.toSpanData(); + assertThat(spanData.getTotalRecordedLinks()).isEqualTo(1); + assertThat(spanData.getLinks()) + .satisfiesExactly( + link -> { + assertThat(link.getSpanContext()).isEqualTo(linkedSpan.getSpanContext()); + }); + } + @Test void droppingAttributes() { int maxNumberOfAttributes = 8; @@ -907,6 +1165,9 @@ void recordException() { testClock.advance(Duration.ofNanos(1000)); long timestamp = testClock.now(); + // make sure that span attributes don't leak down to the exception event + span.setAttribute("spankey", "val"); + span.recordException(exception); List events = span.toSpanData().getEvents(); @@ -914,20 +1175,17 @@ void recordException() { EventData event = events.get(0); assertThat(event.getName()).isEqualTo("exception"); assertThat(event.getEpochNanos()).isEqualTo(timestamp); - assertThat(event.getAttributes()) - .isEqualTo( - Attributes.builder() - .put("exception.type", "java.lang.IllegalStateException") - .put("exception.message", "there was an exception") - .put("exception.stacktrace", stacktrace) - .build()); - + assertThat(event.getAttributes().get(stringKey("exception.message"))) + .isEqualTo("there was an exception"); + assertThat(event.getAttributes().get(stringKey("exception.type"))) + .isEqualTo(exception.getClass().getName()); + assertThat(event.getAttributes().get(stringKey("exception.stacktrace"))).isEqualTo(stacktrace); + assertThat(event.getAttributes().size()).isEqualTo(3); assertThat(event) .isInstanceOfSatisfying( ExceptionEventData.class, exceptionEvent -> { assertThat(exceptionEvent.getException()).isSameAs(exception); - assertThat(exceptionEvent.getAdditionalAttributes()).isEqualTo(Attributes.empty()); }); } @@ -936,12 +1194,20 @@ void recordException_noMessage() { IllegalStateException exception = new IllegalStateException(); SdkSpan span = createTestRootSpan(); + StringWriter writer = new StringWriter(); + exception.printStackTrace(new PrintWriter(writer)); + String stacktrace = writer.toString(); + span.recordException(exception); List events = span.toSpanData().getEvents(); assertThat(events).hasSize(1); EventData event = events.get(0); assertThat(event.getAttributes().get(stringKey("exception.message"))).isNull(); + assertThat(event.getAttributes().get(stringKey("exception.type"))) + .isEqualTo("java.lang.IllegalStateException"); + assertThat(event.getAttributes().get(stringKey("exception.stacktrace"))).isEqualTo(stacktrace); + assertThat(event.getAttributes().size()).isEqualTo(2); } private static class InnerClassException extends Exception {} @@ -951,6 +1217,10 @@ void 
recordException_innerClassException() { InnerClassException exception = new InnerClassException(); SdkSpan span = createTestRootSpan(); + StringWriter writer = new StringWriter(); + exception.printStackTrace(new PrintWriter(writer)); + String stacktrace = writer.toString(); + span.recordException(exception); List events = span.toSpanData().getEvents(); @@ -958,6 +1228,8 @@ void recordException_innerClassException() { EventData event = events.get(0); assertThat(event.getAttributes().get(stringKey("exception.type"))) .isEqualTo("io.opentelemetry.sdk.trace.SdkSpanTest.InnerClassException"); + assertThat(event.getAttributes().get(stringKey("exception.stacktrace"))).isEqualTo(stacktrace); + assertThat(event.getAttributes().size()).isEqualTo(2); } @Test @@ -972,6 +1244,9 @@ void recordException_additionalAttributes() { testClock.advance(Duration.ofNanos(1000)); long timestamp = testClock.now(); + // make sure that span attributes don't leak down to the exception event + span.setAttribute("spankey", "val"); + span.recordException( exception, Attributes.of( @@ -985,30 +1260,38 @@ void recordException_additionalAttributes() { EventData event = events.get(0); assertThat(event.getName()).isEqualTo("exception"); assertThat(event.getEpochNanos()).isEqualTo(timestamp); - assertThat(event.getAttributes()) - .isEqualTo( - Attributes.builder() - .put("key1", "this is an additional attribute") - .put("exception.type", "java.lang.IllegalStateException") - .put("exception.message", "this is a precedence attribute") - .put("exception.stacktrace", stacktrace) - .build()); + assertThat(event.getAttributes().get(stringKey("exception.message"))) + .isEqualTo("this is a precedence attribute"); + assertThat(event.getAttributes().get(stringKey("key1"))) + .isEqualTo("this is an additional attribute"); + assertThat(event.getAttributes().get(stringKey("exception.type"))) + .isEqualTo("java.lang.IllegalStateException"); + assertThat(event.getAttributes().get(stringKey("exception.stacktrace"))).isEqualTo(stacktrace); + assertThat(event.getAttributes().size()).isEqualTo(4); assertThat(event) .isInstanceOfSatisfying( ExceptionEventData.class, exceptionEvent -> { assertThat(exceptionEvent.getException()).isSameAs(exception); - assertThat(exceptionEvent.getAdditionalAttributes()) - .isEqualTo( - Attributes.of( - stringKey("key1"), - "this is an additional attribute", - stringKey("exception.message"), - "this is a precedence attribute")); }); } + @Test + void recordException_SpanLimits() { + SdkSpan span = createTestSpan(SpanLimits.builder().setMaxNumberOfAttributes(2).build()); + span.recordException( + new IllegalStateException("error"), + Attributes.builder().put("key1", "value").put("key2", "value").build()); + + List events = span.toSpanData().getEvents(); + assertThat(events.size()).isEqualTo(1); + EventData event = events.get(0); + assertThat(event.getAttributes().size()).isEqualTo(2); + assertThat(event.getTotalAttributeCount()).isEqualTo(5); + assertThat(event.getTotalAttributeCount() - event.getAttributes().size()).isPositive(); + } + @Test void badArgsIgnored() { SdkSpan span = createTestRootSpan(); @@ -1040,6 +1323,96 @@ void badArgsIgnored() { assertThat(data.getName()).isEqualTo(SPAN_NAME); } + @Test + void onStartOnEndNotRequired() { + when(spanProcessor.isStartRequired()).thenReturn(false); + when(spanProcessor.isEndRequired()).thenReturn(false); + + SpanLimits spanLimits = SpanLimits.getDefault(); + SdkSpan span = + SdkSpan.startSpan( + spanContext, + SPAN_NAME, + instrumentationScopeInfo, + 
SpanKind.INTERNAL, + parentSpanId != null + ? Span.wrap( + SpanContext.create( + traceId, parentSpanId, TraceFlags.getDefault(), TraceState.getDefault())) + : Span.getInvalid(), + Context.root(), + spanLimits, + spanProcessor, + testClock, + resource, + AttributesMap.create( + spanLimits.getMaxNumberOfAttributes(), spanLimits.getMaxAttributeValueLength()), + Collections.emptyList(), + 1, + 0); + verify(spanProcessor, never()).onStart(any(), any()); + + span.end(); + verify(spanProcessor, never()).onEnd(any()); + } + + @ParameterizedTest + @MethodSource("setStatusArgs") + void setStatus(Consumer spanConsumer, StatusData expectedSpanData) { + SdkSpan testSpan = createTestRootSpan(); + spanConsumer.accept(testSpan); + assertThat(testSpan.toSpanData().getStatus()).isEqualTo(expectedSpanData); + } + + private static Stream setStatusArgs() { + return Stream.of( + // Default status is UNSET + Arguments.of(spanConsumer(span -> {}), StatusData.unset()), + // Simple cases + Arguments.of(spanConsumer(span -> span.setStatus(StatusCode.OK)), StatusData.ok()), + Arguments.of(spanConsumer(span -> span.setStatus(StatusCode.ERROR)), StatusData.error()), + // UNSET is ignored + Arguments.of( + spanConsumer(span -> span.setStatus(StatusCode.OK).setStatus(StatusCode.UNSET)), + StatusData.ok()), + Arguments.of( + spanConsumer(span -> span.setStatus(StatusCode.ERROR).setStatus(StatusCode.UNSET)), + StatusData.error()), + // Description is ignored unless status is ERROR + Arguments.of( + spanConsumer(span -> span.setStatus(StatusCode.UNSET, "description")), + StatusData.unset()), + Arguments.of( + spanConsumer(span -> span.setStatus(StatusCode.OK, "description")), StatusData.ok()), + Arguments.of( + spanConsumer(span -> span.setStatus(StatusCode.ERROR, "description")), + StatusData.create(StatusCode.ERROR, "description")), + // ERROR is ignored if status is OK + Arguments.of( + spanConsumer( + span -> span.setStatus(StatusCode.OK).setStatus(StatusCode.ERROR, "description")), + StatusData.ok()), + // setStatus ignored after span is ended + Arguments.of( + spanConsumer( + span -> { + span.end(); + span.setStatus(StatusCode.OK); + }), + StatusData.unset()), + Arguments.of( + spanConsumer( + span -> { + span.end(); + span.setStatus(StatusCode.ERROR); + }), + StatusData.unset())); + } + + private static Consumer spanConsumer(Consumer spanConsumer) { + return spanConsumer; + } + private SdkSpan createTestSpanWithAttributes(Map attributes) { SpanLimits spanLimits = SpanLimits.getDefault(); AttributesMap attributesMap = @@ -1078,7 +1451,8 @@ private SdkSpan createTestSpan( SpanLimits config, @Nullable String parentSpanId, @Nullable AttributesMap attributes, - List links) { + @Nullable List links) { + List linksCopy = links == null ? 
new ArrayList<>() : new ArrayList<>(links); SdkSpan span = SdkSpan.startSpan( @@ -1097,8 +1471,8 @@ private SdkSpan createTestSpan( testClock, resource, attributes, - links, - 1, + linksCopy, + linksCopy.size(), 0); Mockito.verify(spanProcessor, Mockito.times(1)).onStart(Context.root(), span); return span; @@ -1160,7 +1534,9 @@ void testAsSpanData() { Resource resource = this.resource; Attributes attributes = TestUtils.generateRandomAttributes(); AttributesMap attributesWithCapacity = AttributesMap.create(32, Integer.MAX_VALUE); - attributes.forEach(attributesWithCapacity::put); + attributes.forEach( + (attributeKey, object) -> + attributesWithCapacity.put((AttributeKey) attributeKey, object)); Attributes event1Attributes = TestUtils.generateRandomAttributes(); Attributes event2Attributes = TestUtils.generateRandomAttributes(); SpanContext context = diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkTracerProviderTest.java b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkTracerProviderTest.java index 44f75373cb1..61b64b4ebdf 100644 --- a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkTracerProviderTest.java +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkTracerProviderTest.java @@ -18,7 +18,10 @@ import io.opentelemetry.sdk.common.Clock; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.internal.SdkTracerProviderUtil; +import io.opentelemetry.sdk.trace.internal.TracerConfig; import io.opentelemetry.sdk.trace.samplers.Sampler; import java.util.function.Supplier; import org.junit.jupiter.api.BeforeEach; @@ -184,6 +187,35 @@ void propagatesInstrumentationScopeInfoToTracer() { assertThat(((SdkTracer) tracer).getInstrumentationScopeInfo()).isEqualTo(expected); } + @Test + void propagatesEnablementToTracerDirectly() { + propagatesEnablementToTracer(true); + } + + @Test + void propagatesEnablementToTracerByUtil() { + propagatesEnablementToTracer(false); + } + + void propagatesEnablementToTracer(boolean directly) { + SdkTracer tracer = (SdkTracer) tracerFactory.get("test"); + boolean isEnabled = tracer.isEnabled(); + ScopeConfigurator flipConfigurator = + new ScopeConfigurator() { + @Override + public TracerConfig apply(InstrumentationScopeInfo scopeInfo) { + return isEnabled ? 
TracerConfig.disabled() : TracerConfig.enabled(); + } + }; + // all in the same thread, so should see enablement change immediately + if (directly) { + tracerFactory.setTracerConfigurator(flipConfigurator); + } else { + SdkTracerProviderUtil.setTracerConfigurator(tracerFactory, flipConfigurator); + } + assertThat(tracer.isEnabled()).isEqualTo(!isEnabled); + } + @Test void build_SpanLimits() { SpanLimits initialSpanLimits = SpanLimits.builder().build(); diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkTracerTest.java b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkTracerTest.java index c17c121a36e..0ca18de5f15 100644 --- a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkTracerTest.java +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/SdkTracerTest.java @@ -16,6 +16,7 @@ import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; +import io.opentelemetry.sdk.trace.internal.TracerConfig; import java.util.Collection; import java.util.concurrent.atomic.AtomicLong; import org.junit.jupiter.api.Test; @@ -50,6 +51,14 @@ void getInstrumentationScopeInfo() { assertThat(tracer.getInstrumentationScopeInfo()).isEqualTo(instrumentationScopeInfo); } + @Test + void updateEnabled() { + tracer.updateTracerConfig(TracerConfig.disabled()); + assertThat(tracer.isEnabled()).isFalse(); + tracer.updateTracerConfig(TracerConfig.enabled()); + assertThat(tracer.isEnabled()).isTrue(); + } + @Test void propagatesInstrumentationScopeInfoToSpan() { ReadableSpan readableSpan = (ReadableSpan) tracer.spanBuilder("spanName").startSpan(); diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/config/SpanLimitsTest.java b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/config/SpanLimitsTest.java index ab749cea0f5..73d9ac2e842 100644 --- a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/config/SpanLimitsTest.java +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/config/SpanLimitsTest.java @@ -6,6 +6,7 @@ package io.opentelemetry.sdk.trace.config; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.opentelemetry.sdk.trace.SpanLimits; @@ -46,27 +47,31 @@ void updateSpanLimits_All() { @Test void invalidSpanLimits() { - assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfAttributes(0)) - .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfAttributes(-1)) .isInstanceOf(IllegalArgumentException.class); - assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfEvents(0)) - .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfEvents(-1)) .isInstanceOf(IllegalArgumentException.class); - assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfLinks(0)) - .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfLinks(-1)) .isInstanceOf(IllegalArgumentException.class); - assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfAttributesPerEvent(0)) - .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfAttributesPerEvent(-1)) .isInstanceOf(IllegalArgumentException.class); - assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfAttributesPerLink(0)) - .isInstanceOf(IllegalArgumentException.class); 
assertThatThrownBy(() -> SpanLimits.builder().setMaxNumberOfAttributesPerLink(-1)) .isInstanceOf(IllegalArgumentException.class); assertThatThrownBy(() -> SpanLimits.builder().setMaxAttributeValueLength(-1)) .isInstanceOf(IllegalArgumentException.class); } + + @Test + void validSpanLimits() { + assertThatCode(() -> SpanLimits.builder().setMaxNumberOfAttributes(0)) + .doesNotThrowAnyException(); + assertThatCode(() -> SpanLimits.builder().setMaxNumberOfEvents(0)).doesNotThrowAnyException(); + assertThatCode(() -> SpanLimits.builder().setMaxNumberOfLinks(0)).doesNotThrowAnyException(); + assertThatCode(() -> SpanLimits.builder().setMaxNumberOfAttributesPerEvent(0)) + .doesNotThrowAnyException(); + assertThatCode(() -> SpanLimits.builder().setMaxNumberOfAttributesPerLink(0)) + .doesNotThrowAnyException(); + assertThatCode(() -> SpanLimits.builder().setMaxAttributeValueLength(0)) + .doesNotThrowAnyException(); + } } diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessorTest.java b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessorTest.java index e659df55183..89aff56d9c7 100644 --- a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessorTest.java +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/export/BatchSpanProcessorTest.java @@ -14,6 +14,8 @@ import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import io.opentelemetry.api.internal.GuardedBy; @@ -26,6 +28,7 @@ import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.sdk.trace.samplers.SamplingResult; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -121,6 +124,47 @@ void builderInvalidConfig() { assertThatThrownBy(() -> BatchSpanProcessor.builder(mockSpanExporter).setExporterTimeout(null)) .isInstanceOf(NullPointerException.class) .hasMessage("timeout"); + assertThatThrownBy(() -> BatchSpanProcessor.builder(mockSpanExporter).setMaxQueueSize(0)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("maxQueueSize must be positive."); + } + + @Test + void builderAdjustMaxBatchSize() { + SpanExporter dummyExporter = new CompletableSpanExporter(); + + BatchSpanProcessorBuilder builder = + BatchSpanProcessor.builder(dummyExporter).setMaxQueueSize(513).setMaxExportBatchSize(1000); + builder.build(); + + assertThat(builder.getMaxExportBatchSize()).isEqualTo(513); + assertThat(builder.getMaxQueueSize()).isEqualTo(513); + } + + @Test + void maxExportBatchSizeExceedsQueueSize() throws InterruptedException { + // Given a processor configured with a maxExportBatchSize > maxQueueSize, ensure that after n = + // maxQueueSize spans are ended, export is triggered and that the queue is fully drained and + // exported. 
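+    // Note: builderAdjustMaxBatchSize above suggests the builder caps the effective batch size at
+    // maxQueueSize, so a full queue should be flushed as a single batch (hence times(1) below).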
+ int maxQueueSize = 2048; + when(mockSpanExporter.export(any())).thenReturn(CompletableResultCode.ofSuccess()); + sdkTracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor( + BatchSpanProcessor.builder(mockSpanExporter) + .setScheduleDelay(Duration.ofSeconds(Integer.MAX_VALUE)) + .setMaxExportBatchSize(2049) + .setMaxQueueSize(maxQueueSize) + .build()) + .build(); + + for (int i = 0; i < maxQueueSize; i++) { + createEndedSpan("span " + i); + } + + Thread.sleep(10); + + await().untilAsserted(() -> verify(mockSpanExporter, times(1)).export(any())); } @Test @@ -419,6 +463,7 @@ public void continuesIfExporterTimesOut() throws InterruptedException { .setExporterTimeout(exporterTimeoutMillis, TimeUnit.MILLISECONDS) .setScheduleDelay(1, TimeUnit.MILLISECONDS) .setMaxQueueSize(1) + .setMaxExportBatchSize(1) .build(); sdkTracerProvider = SdkTracerProvider.builder().addSpanProcessor(bsp).build(); @@ -517,6 +562,38 @@ void exportNotSampledSpans_recordOnly() { assertThat(exported).containsExactly(span.toSpanData()); } + @Test + void exportUnsampledSpans_recordOnly() { + WaitingSpanExporter waitingSpanExporter = + new WaitingSpanExporter(1, CompletableResultCode.ofSuccess()); + + when(mockSampler.shouldSample(any(), any(), any(), any(), any(), anyList())) + .thenReturn(SamplingResult.recordOnly()); + sdkTracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor( + BatchSpanProcessor.builder(waitingSpanExporter) + .setExportUnsampledSpans(true) + .setScheduleDelay(MAX_SCHEDULE_DELAY_MILLIS, TimeUnit.MILLISECONDS) + .build()) + .setSampler(mockSampler) + .build(); + + ReadableSpan span1 = createEndedSpan(SPAN_NAME_1); + when(mockSampler.shouldSample(any(), any(), any(), any(), any(), anyList())) + .thenReturn(SamplingResult.recordAndSample()); + ReadableSpan span2 = createEndedSpan(SPAN_NAME_2); + + // Spans are recorded and exported in the same order as they are ended, we test that a non + // exported span is not exported by creating and ending a sampled span after a non sampled span + // and checking that the first exported span is the sampled span (the non sampled did not get + // exported). + List exported = waitingSpanExporter.waitForExport(); + // Need to check this because otherwise the variable span1 is unused, other option is to not + // have a span1 variable. 
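+    // With setExportUnsampledSpans(true) the record-only (unsampled) span is exported too, so both
+    // spans are expected here, in the order they ended.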
+ assertThat(exported).containsExactly(span1.toSpanData(), span2.toSpanData()); + } + @Test @Timeout(10) @SuppressLogger(SdkTracerProvider.class) @@ -560,6 +637,12 @@ void shutdownPropagatesFailure() { assertThat(result.isSuccess()).isFalse(); } + @Test + void getSpanExporter() { + assertThat(BatchSpanProcessor.builder(mockSpanExporter).build().getSpanExporter()) + .isSameAs(mockSpanExporter); + } + @Test void stringRepresentation() { BatchSpanProcessor processor = BatchSpanProcessor.builder(mockSpanExporter).build(); @@ -569,6 +652,7 @@ void stringRepresentation() { .hasToString( "BatchSpanProcessor{" + "spanExporter=mockSpanExporter, " + + "exportUnsampledSpans=false, " + "scheduleDelayNanos=5000000000, " + "maxExportBatchSize=512, " + "exporterTimeoutNanos=30000000000}"); diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessorTest.java b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessorTest.java index da46ec2a641..62b1d59a84f 100644 --- a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessorTest.java +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/export/SimpleSpanProcessorTest.java @@ -59,7 +59,12 @@ class SimpleSpanProcessorTest { SpanId.getInvalid(), TraceFlags.getSampled(), TraceState.getDefault()); - private static final SpanContext NOT_SAMPLED_SPAN_CONTEXT = SpanContext.getInvalid(); + private static final SpanContext NOT_SAMPLED_SPAN_CONTEXT = + SpanContext.create( + TraceId.getInvalid(), + SpanId.getInvalid(), + TraceFlags.getDefault(), + TraceState.getDefault()); private SpanProcessor simpleSampledSpansProcessor; @@ -100,29 +105,29 @@ void onEndSync_NotSampledSpan() { } @Test - void onEndSync_OnlySampled_NotSampledSpan() { + void onEndSync_ExportUnsampledSpans_NotSampledSpan() { + SpanData spanData = TestUtils.makeBasicSpan(); when(readableSpan.getSpanContext()).thenReturn(NOT_SAMPLED_SPAN_CONTEXT); - when(readableSpan.toSpanData()) - .thenReturn(TestUtils.makeBasicSpan()) - .thenThrow(new RuntimeException()); - SpanProcessor simpleSpanProcessor = SimpleSpanProcessor.create(spanExporter); + when(readableSpan.toSpanData()).thenReturn(spanData); + SpanProcessor simpleSpanProcessor = + SimpleSpanProcessor.builder(spanExporter).setExportUnsampledSpans(true).build(); simpleSpanProcessor.onEnd(readableSpan); - verifyNoInteractions(spanExporter); + verify(spanExporter).export(Collections.singletonList(spanData)); } @Test - void onEndSync_OnlySampled_SampledSpan() { + void onEndSync_ExportUnsampledSpans_SampledSpan() { + SpanData spanData = TestUtils.makeBasicSpan(); when(readableSpan.getSpanContext()).thenReturn(SAMPLED_SPAN_CONTEXT); - when(readableSpan.toSpanData()) - .thenReturn(TestUtils.makeBasicSpan()) - .thenThrow(new RuntimeException()); - SpanProcessor simpleSpanProcessor = SimpleSpanProcessor.create(spanExporter); + when(readableSpan.toSpanData()).thenReturn(spanData); + SpanProcessor simpleSpanProcessor = + SimpleSpanProcessor.builder(spanExporter).setExportUnsampledSpans(true).build(); simpleSpanProcessor.onEnd(readableSpan); - verify(spanExporter).export(Collections.singletonList(TestUtils.makeBasicSpan())); + verify(spanExporter).export(Collections.singletonList(spanData)); } @Test - void tracerSdk_NotSampled_Span() { + void tracerSdk_SampledSpan() { WaitingSpanExporter waitingSpanExporter = new WaitingSpanExporter(1, CompletableResultCode.ofSuccess()); @@ -159,25 +164,43 @@ void tracerSdk_NotSampled_Span() { } @Test - void tracerSdk_NotSampled_RecordingEventsSpan() { - // 
TODO(bdrutu): Fix this when Sampler return RECORD_ONLY option. - /* - tracer.addSpanProcessor( - BatchSpanProcessor.builder(waitingSpanExporter) - .setScheduleDelayMillis(MAX_SCHEDULE_DELAY_MILLIS) - .reportOnlySampled(false) - .build()); - - io.opentelemetry.trace.Span span = - tracer - .spanBuilder("FOO") - .setSampler(Samplers.neverSample()) - .startSpanWithSampler(); - span.end(); - - List exported = waitingSpanExporter.waitForExport(1); - assertThat(exported).containsExactly(((ReadableSpan) span).toSpanData()); - */ + void tracerSdk_ExportUnsampledSpans_NotSampledSpan() { + WaitingSpanExporter waitingSpanExporter = + new WaitingSpanExporter(1, CompletableResultCode.ofSuccess()); + + SdkTracerProvider sdkTracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor( + SimpleSpanProcessor.builder(waitingSpanExporter) + .setExportUnsampledSpans(true) + .build()) + .setSampler(mockSampler) + .build(); + + when(mockSampler.shouldSample(any(), any(), any(), any(), any(), anyList())) + .thenReturn(SamplingResult.drop()); + + try { + Tracer tracer = sdkTracerProvider.get(getClass().getName()); + tracer.spanBuilder(SPAN_NAME).startSpan(); + tracer.spanBuilder(SPAN_NAME).startSpan(); + + when(mockSampler.shouldSample(any(), any(), any(), any(), any(), anyList())) + .thenReturn(SamplingResult.recordOnly()); + Span span = tracer.spanBuilder(SPAN_NAME).startSpan(); + span.end(); + + // Spans are recorded and exported in the same order as they are ended, we test that a non + // sampled span is not exported by creating and ending a sampled span after a non sampled span + // and checking that the first exported span is the sampled span (the non sampled did not get + // exported). + List exported = waitingSpanExporter.waitForExport(); + // Need to check this because otherwise the variable span1 is unused, other option is to not + // have a span1 variable. + assertThat(exported).containsExactly(((ReadableSpan) span).toSpanData()); + } finally { + sdkTracerProvider.shutdown(); + } } @Test @@ -252,4 +275,10 @@ void close() { simpleSampledSpansProcessor.close(); verify(spanExporter).shutdown(); } + + @Test + void getSpanExporter() { + assertThat(((SimpleSpanProcessor) SimpleSpanProcessor.create(spanExporter)).getSpanExporter()) + .isSameAs(spanExporter); + } } diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/internal/ExtendedSpanProcessorUsageTest.java b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/internal/ExtendedSpanProcessorUsageTest.java new file mode 100644 index 00000000000..3185aa9a1df --- /dev/null +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/internal/ExtendedSpanProcessorUsageTest.java @@ -0,0 +1,85 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace.internal; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import org.junit.jupiter.api.Test; + +/** Demonstrating usage of {@link ExtendedSpanProcessor}. 
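+ *
+ * <p>The {@code CopyFooToBarProcessor} below copies the {@code foo} attribute to {@code bar} in
+ * {@link ExtendedSpanProcessor#onEnding(ReadWriteSpan)}, which is invoked while the span is ending:
+ * its end timestamp is already fixed, but attributes can still be written (see the onEnding_* tests
+ * in SdkSpanTest).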
*/ +class ExtendedSpanProcessorUsageTest { + + private static final AttributeKey FOO_KEY = AttributeKey.stringKey("foo"); + private static final AttributeKey BAR_KEY = AttributeKey.stringKey("bar"); + + private static class CopyFooToBarProcessor implements ExtendedSpanProcessor { + + @Override + public void onStart(Context parentContext, ReadWriteSpan span) {} + + @Override + public boolean isStartRequired() { + return false; + } + + @Override + public void onEnd(ReadableSpan span) {} + + @Override + public boolean isEndRequired() { + return false; + } + + @Override + public void onEnding(ReadWriteSpan span) { + String val = span.getAttribute(FOO_KEY); + span.setAttribute(BAR_KEY, val); + } + + @Override + public boolean isOnEndingRequired() { + return true; + } + } + + @Test + void extendedSpanProcessorUsage() { + InMemorySpanExporter exporter = InMemorySpanExporter.create(); + + try (SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(exporter)) + .addSpanProcessor(new CopyFooToBarProcessor()) + .build()) { + + Tracer tracer = tracerProvider.get("dummy-tracer"); + Span span = tracer.spanBuilder("my-span").startSpan(); + + span.setAttribute(FOO_KEY, "Hello!"); + + span.end(); + + assertThat(exporter.getFinishedSpanItems()) + .hasSize(1) + .first() + .satisfies( + spanData -> { + assertThat(spanData.getAttributes()) + .containsEntry(FOO_KEY, "Hello!") + .containsEntry(BAR_KEY, "Hello!"); + }); + } + } +} diff --git a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/samplers/ParentBasedSamplerTest.java b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/samplers/ParentBasedSamplerTest.java index 5264fabfff9..0d7238c912c 100644 --- a/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/samplers/ParentBasedSamplerTest.java +++ b/sdk/trace/src/test/java/io/opentelemetry/sdk/trace/samplers/ParentBasedSamplerTest.java @@ -410,6 +410,13 @@ void getDescription() { @Test void equals() { - EqualsVerifier.forClass(ParentBasedSampler.class).verify(); + EqualsVerifier.forClass(ParentBasedSampler.class) + .withNonnullFields( + "root", + "remoteParentSampled", + "remoteParentNotSampled", + "localParentSampled", + "localParentNotSampled") + .verify(); } } diff --git a/sdk/trace/src/testIncubating/java/io/opentelemetry/sdk/trace/ExtendedTracerTest.java b/sdk/trace/src/testIncubating/java/io/opentelemetry/sdk/trace/ExtendedTracerTest.java new file mode 100644 index 00000000000..c093b6da245 --- /dev/null +++ b/sdk/trace/src/testIncubating/java/io/opentelemetry/sdk/trace/ExtendedTracerTest.java @@ -0,0 +1,71 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace; + +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameEquals; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.trace.internal.TracerConfig.disabled; + +import io.opentelemetry.api.incubator.trace.ExtendedSpanBuilder; +import io.opentelemetry.api.incubator.trace.ExtendedTracer; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import java.util.function.Supplier; +import java.util.stream.Stream; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class ExtendedTracerTest { + + /** + * {@link ExtendedTracer#spanBuilder(String)} 
delegates to {@link SdkTracer#spanBuilder(String)} + * and casts the result to {@link ExtendedSpanBuilder}. Therefore, we need to confirm that {@link + * SdkTracer#spanBuilder(String)} correctly returns {@link ExtendedSpanBuilder} and not {@link + * io.opentelemetry.api.trace.SpanBuilder} in all cases, else the user will get {@link + * ClassCastException}. + */ + @ParameterizedTest + @MethodSource("spanBuilderArgs") + void spanBuilder(Supplier spanBuilderSupplier) { + ExtendedSpanBuilder spanBuilder = spanBuilderSupplier.get(); + assertThat(spanBuilder).isInstanceOf(ExtendedSpanBuilder.class); + } + + private static Stream spanBuilderArgs() { + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(InMemorySpanExporter.create())) + .addTracerConfiguratorCondition(nameEquals("tracerB"), disabled()) + .build(); + + ExtendedTracer tracerA = (ExtendedTracer) tracerProvider.get("tracerA"); + ExtendedTracer tracerB = (ExtendedTracer) tracerProvider.get("tracerB"); + + SdkTracerProvider tracerProvider2 = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(InMemorySpanExporter.create())) + .build(); + ExtendedTracer tracerC = (ExtendedTracer) tracerProvider.get("tracerC"); + tracerProvider2.shutdown(); + + return Stream.of( + // Simple case + Arguments.of(spanBuilderSupplier(() -> tracerA.spanBuilder("span"))), + // Disabled tracer + Arguments.of(spanBuilderSupplier(() -> tracerB.spanBuilder("span"))), + // Invalid span name + Arguments.of(spanBuilderSupplier(() -> tracerB.spanBuilder(null))), + Arguments.of(spanBuilderSupplier(() -> tracerB.spanBuilder(" "))), + // Shutdown tracer provider + Arguments.of(spanBuilderSupplier(() -> tracerC.spanBuilder("span")))); + } + + private static Supplier spanBuilderSupplier( + Supplier spanBuilderSupplier) { + return spanBuilderSupplier; + } +} diff --git a/sdk/trace/src/testIncubating/java/io/opentelemetry/sdk/trace/TracerConfigTest.java b/sdk/trace/src/testIncubating/java/io/opentelemetry/sdk/trace/TracerConfigTest.java new file mode 100644 index 00000000000..28a233c26e8 --- /dev/null +++ b/sdk/trace/src/testIncubating/java/io/opentelemetry/sdk/trace/TracerConfigTest.java @@ -0,0 +1,222 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.sdk.trace; + +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameEquals; +import static io.opentelemetry.sdk.internal.ScopeConfiguratorBuilder.nameMatchesGlob; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.trace.internal.TracerConfig.defaultConfig; +import static io.opentelemetry.sdk.trace.internal.TracerConfig.disabled; +import static io.opentelemetry.sdk.trace.internal.TracerConfig.enabled; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanId; +import io.opentelemetry.context.Scope; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.internal.ScopeConfigurator; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.internal.TracerConfig; +import java.util.stream.Stream; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import 
org.junit.jupiter.params.provider.MethodSource;
+
+class TracerConfigTest {
+
+  @Test
+  void disableScopes() throws InterruptedException {
+    InMemorySpanExporter exporter = InMemorySpanExporter.create();
+    SdkTracerProvider tracerProvider =
+        SdkTracerProvider.builder()
+            // Disable tracerB. Since tracers are enabled by default, tracerA and tracerC are
+            // enabled.
+            .addTracerConfiguratorCondition(nameEquals("tracerB"), disabled())
+            .addSpanProcessor(SimpleSpanProcessor.create(exporter))
+            .build();
+
+    ExtendedSdkTracer tracerA = (ExtendedSdkTracer) tracerProvider.get("tracerA");
+    ExtendedSdkTracer tracerB = (ExtendedSdkTracer) tracerProvider.get("tracerB");
+    ExtendedSdkTracer tracerC = (ExtendedSdkTracer) tracerProvider.get("tracerC");
+
+    Span parent;
+    Span child;
+    Span grandchild;
+
+    parent = tracerA.spanBuilder("parent").startSpan();
+    try (Scope parentScope = parent.makeCurrent()) {
+      parent.setAttribute("a", "1");
+      child = tracerB.spanBuilder("child").startSpan();
+      // tracerB is disabled and should behave the same as noop tracer
+      assertThat(child.getSpanContext()).isEqualTo(parent.getSpanContext());
+      assertThat(child.isRecording()).isFalse();
+      try (Scope childScope = child.makeCurrent()) {
+        child.setAttribute("b", "1");
+        grandchild = tracerC.spanBuilder("grandchild").startSpan();
+        try (Scope grandchildScope = grandchild.makeCurrent()) {
+          grandchild.setAttribute("c", "1");
+          Thread.sleep(100);
+        } finally {
+          grandchild.end();
+        }
+      } finally {
+        child.end();
+      }
+    } finally {
+      parent.end();
+    }
+
+    // Only tracerA:parent and tracerC:grandchild should be seen;
+    // tracerC:grandchild should list tracerA:parent as its parent
+    assertThat(exporter.getFinishedSpanItems())
+        .satisfiesExactlyInAnyOrder(
+            spanData ->
+                assertThat(spanData)
+                    .hasInstrumentationScopeInfo(InstrumentationScopeInfo.create("tracerA"))
+                    .hasName("parent")
+                    .hasSpanId(parent.getSpanContext().getSpanId())
+                    .hasParentSpanId(SpanId.getInvalid())
+                    .hasAttributes(Attributes.builder().put("a", "1").build()),
+            spanData ->
+                assertThat(spanData)
+                    .hasInstrumentationScopeInfo(InstrumentationScopeInfo.create("tracerC"))
+                    .hasName("grandchild")
+                    .hasSpanId(grandchild.getSpanContext().getSpanId())
+                    .hasParentSpanId(parent.getSpanContext().getSpanId())
+                    .hasAttributes(Attributes.builder().put("c", "1").build()));
+    // tracerA and tracerC are enabled, tracerB is disabled.
+    assertThat(tracerA.isEnabled()).isTrue();
+    assertThat(tracerB.isEnabled()).isFalse();
+    assertThat(tracerC.isEnabled()).isTrue();
+  }
+
+  @ParameterizedTest
+  @MethodSource("tracerConfiguratorArgs")
+  void tracerConfigurator(
+      ScopeConfigurator<TracerConfig> tracerConfigurator,
+      InstrumentationScopeInfo scope,
+      TracerConfig expectedTracerConfig) {
+    TracerConfig tracerConfig = tracerConfigurator.apply(scope);
+    tracerConfig = tracerConfig == null ? defaultConfig() : tracerConfig;
+    assertThat(tracerConfig).isEqualTo(expectedTracerConfig);
+  }
+
+  private static final InstrumentationScopeInfo scopeCat = InstrumentationScopeInfo.create("cat");
+  private static final InstrumentationScopeInfo scopeDog = InstrumentationScopeInfo.create("dog");
+  private static final InstrumentationScopeInfo scopeDuck = InstrumentationScopeInfo.create("duck");
+
+  private static Stream<Arguments> tracerConfiguratorArgs() {
+    ScopeConfigurator<TracerConfig> defaultConfigurator =
+        TracerConfig.configuratorBuilder().build();
+    ScopeConfigurator<TracerConfig> disableCat =
+        TracerConfig.configuratorBuilder()
+            .addCondition(nameEquals("cat"), disabled())
+            // Second matching rule for cat should be ignored
+            .addCondition(nameEquals("cat"), enabled())
+            .build();
+    ScopeConfigurator<TracerConfig> disableStartsWithD =
+        TracerConfig.configuratorBuilder().addCondition(nameMatchesGlob("d*"), disabled()).build();
+    ScopeConfigurator<TracerConfig> enableCat =
+        TracerConfig.configuratorBuilder()
+            .setDefault(disabled())
+            .addCondition(nameEquals("cat"), enabled())
+            // Second matching rule for cat should be ignored
+            .addCondition(nameEquals("cat"), disabled())
+            .build();
+    ScopeConfigurator<TracerConfig> enableStartsWithD =
+        TracerConfig.configuratorBuilder()
+            .setDefault(disabled())
+            .addCondition(nameMatchesGlob("d*"), TracerConfig.enabled())
+            .build();
+
+    return Stream.of(
+        // default
+        Arguments.of(defaultConfigurator, scopeCat, defaultConfig()),
+        Arguments.of(defaultConfigurator, scopeDog, defaultConfig()),
+        Arguments.of(defaultConfigurator, scopeDuck, defaultConfig()),
+        // default enabled, disable cat
+        Arguments.of(disableCat, scopeCat, disabled()),
+        Arguments.of(disableCat, scopeDog, enabled()),
+        Arguments.of(disableCat, scopeDuck, enabled()),
+        // default enabled, disable pattern
+        Arguments.of(disableStartsWithD, scopeCat, enabled()),
+        Arguments.of(disableStartsWithD, scopeDog, disabled()),
+        Arguments.of(disableStartsWithD, scopeDuck, disabled()),
+        // default disabled, enable cat
+        Arguments.of(enableCat, scopeCat, enabled()),
+        Arguments.of(enableCat, scopeDog, disabled()),
+        Arguments.of(enableCat, scopeDuck, disabled()),
+        // default disabled, enable pattern
+        Arguments.of(enableStartsWithD, scopeCat, disabled()),
+        Arguments.of(enableStartsWithD, scopeDog, enabled()),
+        Arguments.of(enableStartsWithD, scopeDuck, enabled()));
+  }
+
+  @Test
+  void setScopeConfigurator() {
+    // 1. Initially, configure all tracers to be enabled except tracerB
+    InMemorySpanExporter exporter = InMemorySpanExporter.create();
+    SdkTracerProvider tracerProvider =
+        SdkTracerProvider.builder()
+            .addTracerConfiguratorCondition(nameEquals("tracerB"), disabled())
+            .addSpanProcessor(SimpleSpanProcessor.create(exporter))
+            .build();
+
+    ExtendedSdkTracer tracerA = (ExtendedSdkTracer) tracerProvider.get("tracerA");
+    ExtendedSdkTracer tracerB = (ExtendedSdkTracer) tracerProvider.get("tracerB");
+    ExtendedSdkTracer tracerC = (ExtendedSdkTracer) tracerProvider.get("tracerC");
+
+    // verify isEnabled()
+    assertThat(tracerA.isEnabled()).isTrue();
+    assertThat(tracerB.isEnabled()).isFalse();
+    assertThat(tracerC.isEnabled()).isTrue();
+
+    // verify spans are emitted as expected
+    tracerA.spanBuilder("spanA").startSpan().end();
+    tracerB.spanBuilder("spanB").startSpan().end();
+    tracerC.spanBuilder("spanC").startSpan().end();
+    assertThat(exporter.getFinishedSpanItems())
+        .satisfiesExactlyInAnyOrder(
+            span -> assertThat(span).hasName("spanA"), span -> assertThat(span).hasName("spanC"));
+    exporter.reset();
+
+    // 2. Update config to disable all tracers
+    tracerProvider.setTracerConfigurator(
+        ScopeConfigurator.<TracerConfig>builder().setDefault(TracerConfig.disabled()).build());
+
+    // verify isEnabled()
+    assertThat(tracerA.isEnabled()).isFalse();
+    assertThat(tracerB.isEnabled()).isFalse();
+    assertThat(tracerC.isEnabled()).isFalse();
+
+    // verify spans are emitted as expected
+    tracerA.spanBuilder("spanA").startSpan().end();
+    tracerB.spanBuilder("spanB").startSpan().end();
+    tracerC.spanBuilder("spanC").startSpan().end();
+    assertThat(exporter.getFinishedSpanItems()).isEmpty();
+
+    // 3. Update config to restore original
+    tracerProvider.setTracerConfigurator(
+        ScopeConfigurator.<TracerConfig>builder()
+            .addCondition(nameEquals("tracerB"), disabled())
+            .build());
+
+    // verify isEnabled()
+    assertThat(tracerA.isEnabled()).isTrue();
+    assertThat(tracerB.isEnabled()).isFalse();
+    assertThat(tracerC.isEnabled()).isTrue();
+
+    // verify spans are emitted as expected
+    tracerA.spanBuilder("spanA").startSpan().end();
+    tracerB.spanBuilder("spanB").startSpan().end();
+    tracerC.spanBuilder("spanC").startSpan().end();
+    assertThat(exporter.getFinishedSpanItems())
+        .satisfiesExactly(
+            span -> assertThat(span).hasName("spanA"), span -> assertThat(span).hasName("spanC"));
+  }
+}
diff --git a/semconv/README.md b/semconv/README.md
deleted file mode 100644
index c70c47abe06..00000000000
--- a/semconv/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# OpenTelemetry Semantic Conventions
-
-* This module contains generated code for the Semantic Conventions defined by the OpenTelemetry specification.
-* Scripts for generating the classes live in the `buildscripts/semantic-convention` directory
-at the top level of the project.
diff --git a/semconv/build.gradle.kts b/semconv/build.gradle.kts
deleted file mode 100644
index 6ddee0bd7b2..00000000000
--- a/semconv/build.gradle.kts
+++ /dev/null
@@ -1,13 +0,0 @@
-plugins {
-  id("otel.java-conventions")
-  id("otel.publish-conventions")
-
-  id("otel.animalsniffer-conventions")
-}
-
-description = "OpenTelemetry Semantic Conventions"
-otelJava.moduleName.set("io.opentelemetry.semconv")
-
-dependencies {
-  api(project(":api:all"))
-}
diff --git a/semconv/gradle.properties b/semconv/gradle.properties
deleted file mode 100644
index 4476ae57e31..00000000000
--- a/semconv/gradle.properties
+++ /dev/null
@@ -1 +0,0 @@
-otel.release=alpha
diff --git a/semconv/src/main/java/io/opentelemetry/semconv/resource/attributes/ResourceAttributes.java b/semconv/src/main/java/io/opentelemetry/semconv/resource/attributes/ResourceAttributes.java
deleted file mode 100644
index af2dccf3274..00000000000
--- a/semconv/src/main/java/io/opentelemetry/semconv/resource/attributes/ResourceAttributes.java
+++ /dev/null
@@ -1,980 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.semconv.resource.attributes;
-
-import static io.opentelemetry.api.common.AttributeKey.booleanKey;
-import static io.opentelemetry.api.common.AttributeKey.longKey;
-import static io.opentelemetry.api.common.AttributeKey.stringArrayKey;
-import static io.opentelemetry.api.common.AttributeKey.stringKey;
-
-import io.opentelemetry.api.common.AttributeKey;
-import java.util.List;
-
-/**
- * @deprecated Use {@code io.opentelemetry.semconv.ResourceAttributes} from io.opentelemetry.semconv:opentelemetry-semconv:{{version}}
- *     instead.
- */
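For anyone consuming the module removed below, a brief migration sketch may help. The replacement artifact and class name come directly from the deprecation notice above; the artifact version is left elided (the javadoc itself uses a {{version}} template variable), and the surrounding class is a hypothetical placeholder added only for illustration.

// Editor's sketch, not part of the diff: switching to the published semconv artifact
// named in the deprecation notice (io.opentelemetry.semconv:opentelemetry-semconv, version elided).
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.semconv.ResourceAttributes; // was io.opentelemetry.semconv.resource.attributes.ResourceAttributes

class SemconvMigrationExample {
  // Same AttributeKey-based usage as before; only the package/artifact changes.
  static Attributes serviceAttributes() {
    return Attributes.of(ResourceAttributes.SERVICE_NAME, "my-service");
  }
}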
-@Deprecated
-// DO NOT EDIT, this is an Auto-generated file from
-// buildscripts/semantic-convention/templates/SemanticAttributes.java.j2
-@SuppressWarnings("unused")
-public final class ResourceAttributes {
-  /** The URL of the OpenTelemetry schema for these keys and values. */
-  public static final String SCHEMA_URL = "https://opentelemetry.io/schemas/1.20.0";
-
-  /**
-   * Array of brand name and version separated by a space
-   *
-   * <p>Notes:
-   *
-   * <ul>
-   *   <li>This value is intended to be taken from the UA client hints API ({@code
-   *       navigator.userAgentData.brands}).
-   * </ul>
-   */
-  public static final AttributeKey<List<String>> BROWSER_BRANDS = stringArrayKey("browser.brands");
-
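As a hedged illustration of how these generated keys were typically consumed, a minimal sketch follows. The SDK Resource and Attributes APIs shown are the standard ones; the class name and the sample brand strings are invented for the example.

// Editor's sketch, not part of the diff: attaching the browser.brands key above to an SDK Resource.
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.semconv.resource.attributes.ResourceAttributes;
import java.util.Arrays;

class BrowserResourceExample {
  static Resource browserResource() {
    // browser.brands is expected to mirror navigator.userAgentData.brands, per the javadoc.
    return Resource.create(
        Attributes.builder()
            .put(ResourceAttributes.BROWSER_BRANDS, Arrays.asList("Chromium 112", "Google Chrome 112"))
            .build());
  }
}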

-  /**
-   * The platform on which the browser is running
-   *
-   * <p>Notes:
-   *
-   * <ul>
-   *   <li>This value is intended to be taken from the UA client hints API ({@code
-   *       navigator.userAgentData.platform}). If unavailable, the legacy {@code navigator.platform}
-   *       API SHOULD NOT be used instead and this attribute SHOULD be left unset in order for the
-   *       values to be consistent. The list of possible values is defined in the W3C User-Agent
-   *       Client Hints specification. Note that some (but not all) of these values can overlap with
-   *       values in the {@code os.type} and {@code os.name} attributes. However, for consistency,
-   *       the values in the {@code browser.platform} attribute should capture the exact value that
-   *       the user agent provides.
-   * </ul>
-   */
-  public static final AttributeKey<String> BROWSER_PLATFORM = stringKey("browser.platform");
-
-  /**
-   * A boolean that is true if the browser is running on a mobile device
-   *
-   * <p>Notes:
-   *
-   * <ul>
-   *   <li>This value is intended to be taken from the UA client hints API ({@code
-   *       navigator.userAgentData.mobile}). If unavailable, this attribute SHOULD be left unset.
-   * </ul>
-   */
-  public static final AttributeKey<Boolean> BROWSER_MOBILE = booleanKey("browser.mobile");
-
-  /**
-   * Preferred language of the user using the browser
-   *
-   * <p>Notes:
-   *
-   * <ul>
-   *   <li>This value is intended to be taken from the Navigator API {@code navigator.language}.
-   * </ul>
-   */
-  public static final AttributeKey<String> BROWSER_LANGUAGE = stringKey("browser.language");
-
-  /** Name of the cloud provider. */
-  public static final AttributeKey<String> CLOUD_PROVIDER = stringKey("cloud.provider");
-
-  /** The cloud account ID the resource is assigned to. */
-  public static final AttributeKey<String> CLOUD_ACCOUNT_ID = stringKey("cloud.account.id");
-
-  /**
-   * The geographical region the resource is running.
-   *
-   *

    Notes: - * - *

    - */ - public static final AttributeKey CLOUD_REGION = stringKey("cloud.region"); - - /** - * Cloud provider-specific native identifier of the monitored cloud resource (e.g. an ARN on - * AWS, a fully - * qualified resource ID on Azure, a full resource - * name on GCP) - * - *

    Notes: - * - *

      - *
    • On some cloud providers, it may not be possible to determine the full ID at startup, so - * it may be necessary to set {@code cloud.resource_id} as a span attribute instead. - *
    • The exact value to use for {@code cloud.resource_id} depends on the cloud provider. The - * following well-known definitions MUST be used if you set this attribute and they apply: - *
    • AWS Lambda: The function ARN. - * Take care not to use the "invoked ARN" directly but replace any alias - * suffix with the resolved function version, as the same runtime instance may be - * invokable with multiple different aliases. - *
    • GCP: The URI of the resource - *
    • Azure: The Fully - * Qualified Resource ID of the invoked function, not the function app, having - * the form {@code - * /subscriptions//resourceGroups//providers/Microsoft.Web/sites//functions/}. - * This means that a span attribute MUST be used, as an Azure function app can host multiple - * functions that would usually share a TracerProvider. - *
    - */ - public static final AttributeKey CLOUD_RESOURCE_ID = stringKey("cloud.resource_id"); - - /** - * Cloud regions often have multiple, isolated locations known as zones to increase availability. - * Availability zone represents the zone where the resource is running. - * - *

    Notes: - * - *

      - *
    • Availability zones are called "zones" on Alibaba Cloud and Google Cloud. - *
    - */ - public static final AttributeKey CLOUD_AVAILABILITY_ZONE = - stringKey("cloud.availability_zone"); - - /** - * The cloud platform in use. - * - *

    Notes: - * - *

      - *
    • The prefix of the service SHOULD match the one specified in {@code cloud.provider}. - *
    - */ - public static final AttributeKey CLOUD_PLATFORM = stringKey("cloud.platform"); - - /** - * The Amazon Resource Name (ARN) of an ECS - * container instance. - */ - public static final AttributeKey AWS_ECS_CONTAINER_ARN = - stringKey("aws.ecs.container.arn"); - - /** - * The ARN of an ECS - * cluster. - */ - public static final AttributeKey AWS_ECS_CLUSTER_ARN = stringKey("aws.ecs.cluster.arn"); - - /** - * The launch - * type for an ECS task. - */ - public static final AttributeKey AWS_ECS_LAUNCHTYPE = stringKey("aws.ecs.launchtype"); - - /** - * The ARN of an ECS - * task definition. - */ - public static final AttributeKey AWS_ECS_TASK_ARN = stringKey("aws.ecs.task.arn"); - - /** The task definition family this task definition is a member of. */ - public static final AttributeKey AWS_ECS_TASK_FAMILY = stringKey("aws.ecs.task.family"); - - /** The revision for this task definition. */ - public static final AttributeKey AWS_ECS_TASK_REVISION = - stringKey("aws.ecs.task.revision"); - - /** The ARN of an EKS cluster. */ - public static final AttributeKey AWS_EKS_CLUSTER_ARN = stringKey("aws.eks.cluster.arn"); - - /** - * The name(s) of the AWS log group(s) an application is writing to. - * - *

    Notes: - * - *

      - *
    • Multiple log groups must be supported for cases like multi-container applications, where - * a single application has sidecar containers, and each write to their own log group. - *
    - */ - public static final AttributeKey> AWS_LOG_GROUP_NAMES = - stringArrayKey("aws.log.group.names"); - - /** - * The Amazon Resource Name(s) (ARN) of the AWS log group(s). - * - *

    Notes: - * - *

    - */ - public static final AttributeKey> AWS_LOG_GROUP_ARNS = - stringArrayKey("aws.log.group.arns"); - - /** The name(s) of the AWS log stream(s) an application is writing to. */ - public static final AttributeKey> AWS_LOG_STREAM_NAMES = - stringArrayKey("aws.log.stream.names"); - - /** - * The ARN(s) of the AWS log stream(s). - * - *

    Notes: - * - *

    - */ - public static final AttributeKey> AWS_LOG_STREAM_ARNS = - stringArrayKey("aws.log.stream.arns"); - - /** Time and date the release was created */ - public static final AttributeKey HEROKU_RELEASE_CREATION_TIMESTAMP = - stringKey("heroku.release.creation_timestamp"); - - /** Commit hash for the current release */ - public static final AttributeKey HEROKU_RELEASE_COMMIT = - stringKey("heroku.release.commit"); - - /** Unique identifier for the application */ - public static final AttributeKey HEROKU_APP_ID = stringKey("heroku.app.id"); - - /** Container name used by container runtime. */ - public static final AttributeKey CONTAINER_NAME = stringKey("container.name"); - - /** - * Container ID. Usually a UUID, as for example used to identify Docker - * containers. The UUID might be abbreviated. - */ - public static final AttributeKey CONTAINER_ID = stringKey("container.id"); - - /** The container runtime managing this container. */ - public static final AttributeKey CONTAINER_RUNTIME = stringKey("container.runtime"); - - /** Name of the image the container was built on. */ - public static final AttributeKey CONTAINER_IMAGE_NAME = stringKey("container.image.name"); - - /** Container image tag. */ - public static final AttributeKey CONTAINER_IMAGE_TAG = stringKey("container.image.tag"); - - /** - * Name of the deployment - * environment (aka deployment tier). - */ - public static final AttributeKey DEPLOYMENT_ENVIRONMENT = - stringKey("deployment.environment"); - - /** - * A unique identifier representing the device - * - *

    Notes: - * - *

      - *
    • The device identifier MUST only be defined using the values outlined below. This value is - * not an advertising identifier and MUST NOT be used as such. On iOS (Swift or - * Objective-C), this value MUST be equal to the vendor - * identifier. On Android (Java or Kotlin), this value MUST be equal to the Firebase - * Installation ID or a globally unique UUID which is persisted across sessions in your - * application. More information can be found here on best - * practices and exact implementation details. Caution should be taken when storing personal - * data or anything which can identify a user. GDPR and data protection laws may apply, - * ensure you do your own due diligence. - *
    - */ - public static final AttributeKey DEVICE_ID = stringKey("device.id"); - - /** - * The model identifier for the device - * - *

    Notes: - * - *

      - *
    • It's recommended this value represents a machine readable version of the model identifier - * rather than the market or consumer-friendly name of the device. - *
    - */ - public static final AttributeKey DEVICE_MODEL_IDENTIFIER = - stringKey("device.model.identifier"); - - /** - * The marketing name for the device model - * - *

    Notes: - * - *

      - *
    • It's recommended this value represents a human readable version of the device model - * rather than a machine readable alternative. - *
    - */ - public static final AttributeKey DEVICE_MODEL_NAME = stringKey("device.model.name"); - - /** - * The name of the device manufacturer - * - *

    Notes: - * - *

      - *
    • The Android OS provides this field via Build. - * iOS apps SHOULD hardcode the value {@code Apple}. - *
    - */ - public static final AttributeKey DEVICE_MANUFACTURER = stringKey("device.manufacturer"); - - /** - * The name of the single function that this runtime instance executes. - * - *

    Notes: - * - *

      - *
    • This is the name of the function as configured/deployed on the FaaS platform and is - * usually different from the name of the callback function (which may be stored in the {@code - * code.namespace}/{@code code.function} span attributes). - *
    • For some cloud providers, the above definition is ambiguous. The following definition of - * function name MUST be used for this attribute (and consequently the span name) for the - * listed cloud providers/products: - *
    • Azure: The full name {@code /}, i.e., function app name - * followed by a forward slash followed by the function name (this form can also be seen in - * the resource JSON for the function). This means that a span attribute MUST be used, as an - * Azure function app can host multiple functions that would usually share a TracerProvider - * (see also the {@code cloud.resource_id} attribute). - *
    - */ - public static final AttributeKey FAAS_NAME = stringKey("faas.name"); - - /** - * The immutable version of the function being executed. - * - *

    Notes: - * - *

      - *
    • Depending on the cloud provider and platform, use: - *
    • AWS Lambda: The function - * version (an integer represented as a decimal string). - *
    • Google Cloud Run: The revision (i.e., the - * function name plus the revision suffix). - *
    • Google Cloud Functions: The value of the {@code - * K_REVISION} environment variable. - *
    • Azure Functions: Not applicable. Do not set this attribute. - *
    - */ - public static final AttributeKey FAAS_VERSION = stringKey("faas.version"); - - /** - * The execution environment ID as a string, that will be potentially reused for other invocations - * to the same function/function version. - * - *

    Notes: - * - *

      - *
    • AWS Lambda: Use the (full) log stream name. - *
    - */ - public static final AttributeKey FAAS_INSTANCE = stringKey("faas.instance"); - - /** - * The amount of memory available to the serverless function converted to Bytes. - * - *

    Notes: - * - *

      - *
    • It's recommended to set this attribute since e.g. too little memory can easily stop a - * Java AWS Lambda function from working correctly. On AWS Lambda, the environment variable - * {@code AWS_LAMBDA_FUNCTION_MEMORY_SIZE} provides this information (which must be - * multiplied by 1,048,576). - *
    - */ - public static final AttributeKey FAAS_MAX_MEMORY = longKey("faas.max_memory"); - - /** - * Unique host ID. For Cloud, this must be the instance_id assigned by the cloud provider. For - * non-containerized systems, this should be the {@code machine-id}. See the table below for the - * sources to use to determine the {@code machine-id} based on operating system. - */ - public static final AttributeKey HOST_ID = stringKey("host.id"); - - /** - * Name of the host. On Unix systems, it may contain what the hostname command returns, or the - * fully qualified hostname, or another name specified by the user. - */ - public static final AttributeKey HOST_NAME = stringKey("host.name"); - - /** Type of host. For Cloud, this must be the machine type. */ - public static final AttributeKey HOST_TYPE = stringKey("host.type"); - - /** The CPU architecture the host system is running on. */ - public static final AttributeKey HOST_ARCH = stringKey("host.arch"); - - /** Name of the VM image or OS install the host was instantiated from. */ - public static final AttributeKey HOST_IMAGE_NAME = stringKey("host.image.name"); - - /** VM image ID. For Cloud, this value is from the provider. */ - public static final AttributeKey HOST_IMAGE_ID = stringKey("host.image.id"); - - /** - * The version string of the VM image as defined in Version - * Attributes. - */ - public static final AttributeKey HOST_IMAGE_VERSION = stringKey("host.image.version"); - - /** The name of the cluster. */ - public static final AttributeKey K8S_CLUSTER_NAME = stringKey("k8s.cluster.name"); - - /** The name of the Node. */ - public static final AttributeKey K8S_NODE_NAME = stringKey("k8s.node.name"); - - /** The UID of the Node. */ - public static final AttributeKey K8S_NODE_UID = stringKey("k8s.node.uid"); - - /** The name of the namespace that the pod is running in. */ - public static final AttributeKey K8S_NAMESPACE_NAME = stringKey("k8s.namespace.name"); - - /** The UID of the Pod. */ - public static final AttributeKey K8S_POD_UID = stringKey("k8s.pod.uid"); - - /** The name of the Pod. */ - public static final AttributeKey K8S_POD_NAME = stringKey("k8s.pod.name"); - - /** - * The name of the Container from Pod specification, must be unique within a Pod. Container - * runtime usually uses different globally unique name ({@code container.name}). - */ - public static final AttributeKey K8S_CONTAINER_NAME = stringKey("k8s.container.name"); - - /** - * Number of times the container was restarted. This attribute can be used to identify a - * particular container (running or stopped) within a container spec. - */ - public static final AttributeKey K8S_CONTAINER_RESTART_COUNT = - longKey("k8s.container.restart_count"); - - /** The UID of the ReplicaSet. */ - public static final AttributeKey K8S_REPLICASET_UID = stringKey("k8s.replicaset.uid"); - - /** The name of the ReplicaSet. */ - public static final AttributeKey K8S_REPLICASET_NAME = stringKey("k8s.replicaset.name"); - - /** The UID of the Deployment. */ - public static final AttributeKey K8S_DEPLOYMENT_UID = stringKey("k8s.deployment.uid"); - - /** The name of the Deployment. */ - public static final AttributeKey K8S_DEPLOYMENT_NAME = stringKey("k8s.deployment.name"); - - /** The UID of the StatefulSet. */ - public static final AttributeKey K8S_STATEFULSET_UID = stringKey("k8s.statefulset.uid"); - - /** The name of the StatefulSet. */ - public static final AttributeKey K8S_STATEFULSET_NAME = stringKey("k8s.statefulset.name"); - - /** The UID of the DaemonSet. 
*/ - public static final AttributeKey K8S_DAEMONSET_UID = stringKey("k8s.daemonset.uid"); - - /** The name of the DaemonSet. */ - public static final AttributeKey K8S_DAEMONSET_NAME = stringKey("k8s.daemonset.name"); - - /** The UID of the Job. */ - public static final AttributeKey K8S_JOB_UID = stringKey("k8s.job.uid"); - - /** The name of the Job. */ - public static final AttributeKey K8S_JOB_NAME = stringKey("k8s.job.name"); - - /** The UID of the CronJob. */ - public static final AttributeKey K8S_CRONJOB_UID = stringKey("k8s.cronjob.uid"); - - /** The name of the CronJob. */ - public static final AttributeKey K8S_CRONJOB_NAME = stringKey("k8s.cronjob.name"); - - /** The operating system type. */ - public static final AttributeKey OS_TYPE = stringKey("os.type"); - - /** - * Human readable (not intended to be parsed) OS version information, like e.g. reported by {@code - * ver} or {@code lsb_release -a} commands. - */ - public static final AttributeKey OS_DESCRIPTION = stringKey("os.description"); - - /** Human readable operating system name. */ - public static final AttributeKey OS_NAME = stringKey("os.name"); - - /** - * The version string of the operating system as defined in Version Attributes. - */ - public static final AttributeKey OS_VERSION = stringKey("os.version"); - - /** Process identifier (PID). */ - public static final AttributeKey PROCESS_PID = longKey("process.pid"); - - /** Parent Process identifier (PID). */ - public static final AttributeKey PROCESS_PARENT_PID = longKey("process.parent_pid"); - - /** - * The name of the process executable. On Linux based systems, can be set to the {@code Name} in - * {@code proc/[pid]/status}. On Windows, can be set to the base name of {@code - * GetProcessImageFileNameW}. - */ - public static final AttributeKey PROCESS_EXECUTABLE_NAME = - stringKey("process.executable.name"); - - /** - * The full path to the process executable. On Linux based systems, can be set to the target of - * {@code proc/[pid]/exe}. On Windows, can be set to the result of {@code - * GetProcessImageFileNameW}. - */ - public static final AttributeKey PROCESS_EXECUTABLE_PATH = - stringKey("process.executable.path"); - - /** - * The command used to launch the process (i.e. the command name). On Linux based systems, can be - * set to the zeroth string in {@code proc/[pid]/cmdline}. On Windows, can be set to the first - * parameter extracted from {@code GetCommandLineW}. - */ - public static final AttributeKey PROCESS_COMMAND = stringKey("process.command"); - - /** - * The full command used to launch the process as a single string representing the full command. - * On Windows, can be set to the result of {@code GetCommandLineW}. Do not set this if you have to - * assemble it just for monitoring; use {@code process.command_args} instead. - */ - public static final AttributeKey PROCESS_COMMAND_LINE = stringKey("process.command_line"); - - /** - * All the command arguments (including the command/executable itself) as received by the process. - * On Linux-based systems (and some other Unixoid systems supporting procfs), can be set according - * to the list of null-delimited strings extracted from {@code proc/[pid]/cmdline}. For libc-based - * executables, this would be the full argv vector passed to {@code main}. - */ - public static final AttributeKey> PROCESS_COMMAND_ARGS = - stringArrayKey("process.command_args"); - - /** The username of the user that owns the process. 
*/ - public static final AttributeKey PROCESS_OWNER = stringKey("process.owner"); - - /** - * The name of the runtime of this process. For compiled native binaries, this SHOULD be the name - * of the compiler. - */ - public static final AttributeKey PROCESS_RUNTIME_NAME = stringKey("process.runtime.name"); - - /** - * The version of the runtime of this process, as returned by the runtime without modification. - */ - public static final AttributeKey PROCESS_RUNTIME_VERSION = - stringKey("process.runtime.version"); - - /** - * An additional description about the runtime of the process, for example a specific vendor - * customization of the runtime environment. - */ - public static final AttributeKey PROCESS_RUNTIME_DESCRIPTION = - stringKey("process.runtime.description"); - - /** - * Logical name of the service. - * - *

    Notes: - * - *

      - *
    • MUST be the same for all instances of horizontally scaled services. If the value was not - * specified, SDKs MUST fallback to {@code unknown_service:} concatenated with {@code process.executable.name}, e.g. {@code - * unknown_service:bash}. If {@code process.executable.name} is not available, the value - * MUST be set to {@code unknown_service}. - *
    - */ - public static final AttributeKey SERVICE_NAME = stringKey("service.name"); - - /** - * A namespace for {@code service.name}. - * - *

    Notes: - * - *

      - *
    • A string value having a meaning that helps to distinguish a group of services, for - * example the team name that owns a group of services. {@code service.name} is expected to - * be unique within the same namespace. If {@code service.namespace} is not specified in the - * Resource then {@code service.name} is expected to be unique for all services that have no - * explicit namespace defined (so the empty/unspecified namespace is simply one more valid - * namespace). Zero-length namespace string is assumed equal to unspecified namespace. - *
    - */ - public static final AttributeKey SERVICE_NAMESPACE = stringKey("service.namespace"); - - /** - * The string ID of the service instance. - * - *

    Notes: - * - *

      - *
    • MUST be unique for each instance of the same {@code service.namespace,service.name} pair - * (in other words {@code service.namespace,service.name,service.instance.id} triplet MUST - * be globally unique). The ID helps to distinguish instances of the same service that exist - * at the same time (e.g. instances of a horizontally scaled service). It is preferable for - * the ID to be persistent and stay the same for the lifetime of the service instance, - * however it is acceptable that the ID is ephemeral and changes during important lifetime - * events for the service (e.g. service restarts). If the service has no inherent unique ID - * that can be used as the value of this attribute it is recommended to generate a random - * Version 1 or Version 4 RFC 4122 UUID (services aiming for reproducible UUIDs may also use - * Version 5, see RFC 4122 for more recommendations). - *
    - */ - public static final AttributeKey SERVICE_INSTANCE_ID = stringKey("service.instance.id"); - - /** The version string of the service API or implementation. */ - public static final AttributeKey SERVICE_VERSION = stringKey("service.version"); - - /** The name of the telemetry SDK as defined above. */ - public static final AttributeKey TELEMETRY_SDK_NAME = stringKey("telemetry.sdk.name"); - - /** The language of the telemetry SDK. */ - public static final AttributeKey TELEMETRY_SDK_LANGUAGE = - stringKey("telemetry.sdk.language"); - - /** The version string of the telemetry SDK. */ - public static final AttributeKey TELEMETRY_SDK_VERSION = - stringKey("telemetry.sdk.version"); - - /** The version string of the auto instrumentation agent, if used. */ - public static final AttributeKey TELEMETRY_AUTO_VERSION = - stringKey("telemetry.auto.version"); - - /** The name of the web engine. */ - public static final AttributeKey WEBENGINE_NAME = stringKey("webengine.name"); - - /** The version of the web engine. */ - public static final AttributeKey WEBENGINE_VERSION = stringKey("webengine.version"); - - /** Additional description of the web engine (e.g. detailed version and edition information). */ - public static final AttributeKey WEBENGINE_DESCRIPTION = - stringKey("webengine.description"); - - /** The name of the instrumentation scope - ({@code InstrumentationScope.Name} in OTLP). */ - public static final AttributeKey OTEL_SCOPE_NAME = stringKey("otel.scope.name"); - - /** The version of the instrumentation scope - ({@code InstrumentationScope.Version} in OTLP). */ - public static final AttributeKey OTEL_SCOPE_VERSION = stringKey("otel.scope.version"); - - /** - * Deprecated, use the {@code otel.scope.name} attribute. - * - * @deprecated Deprecated, use the `otel.scope.name` attribute. - */ - @Deprecated - public static final AttributeKey OTEL_LIBRARY_NAME = stringKey("otel.library.name"); - - /** - * Deprecated, use the {@code otel.scope.version} attribute. - * - * @deprecated Deprecated, use the `otel.scope.version` attribute. - */ - @Deprecated - public static final AttributeKey OTEL_LIBRARY_VERSION = stringKey("otel.library.version"); - - // Enum definitions - public static final class CloudProviderValues { - /** Alibaba Cloud. */ - public static final String ALIBABA_CLOUD = "alibaba_cloud"; - - /** Amazon Web Services. */ - public static final String AWS = "aws"; - - /** Microsoft Azure. */ - public static final String AZURE = "azure"; - - /** Google Cloud Platform. */ - public static final String GCP = "gcp"; - - /** Heroku Platform as a Service. */ - public static final String HEROKU = "heroku"; - - /** IBM Cloud. */ - public static final String IBM_CLOUD = "ibm_cloud"; - - /** Tencent Cloud. */ - public static final String TENCENT_CLOUD = "tencent_cloud"; - - private CloudProviderValues() {} - } - - public static final class CloudPlatformValues { - /** Alibaba Cloud Elastic Compute Service. */ - public static final String ALIBABA_CLOUD_ECS = "alibaba_cloud_ecs"; - - /** Alibaba Cloud Function Compute. */ - public static final String ALIBABA_CLOUD_FC = "alibaba_cloud_fc"; - - /** Red Hat OpenShift on Alibaba Cloud. */ - public static final String ALIBABA_CLOUD_OPENSHIFT = "alibaba_cloud_openshift"; - - /** AWS Elastic Compute Cloud. */ - public static final String AWS_EC2 = "aws_ec2"; - - /** AWS Elastic Container Service. */ - public static final String AWS_ECS = "aws_ecs"; - - /** AWS Elastic Kubernetes Service. 
*/ - public static final String AWS_EKS = "aws_eks"; - - /** AWS Lambda. */ - public static final String AWS_LAMBDA = "aws_lambda"; - - /** AWS Elastic Beanstalk. */ - public static final String AWS_ELASTIC_BEANSTALK = "aws_elastic_beanstalk"; - - /** AWS App Runner. */ - public static final String AWS_APP_RUNNER = "aws_app_runner"; - - /** Red Hat OpenShift on AWS (ROSA). */ - public static final String AWS_OPENSHIFT = "aws_openshift"; - - /** Azure Virtual Machines. */ - public static final String AZURE_VM = "azure_vm"; - - /** Azure Container Instances. */ - public static final String AZURE_CONTAINER_INSTANCES = "azure_container_instances"; - - /** Azure Kubernetes Service. */ - public static final String AZURE_AKS = "azure_aks"; - - /** Azure Functions. */ - public static final String AZURE_FUNCTIONS = "azure_functions"; - - /** Azure App Service. */ - public static final String AZURE_APP_SERVICE = "azure_app_service"; - - /** Azure Red Hat OpenShift. */ - public static final String AZURE_OPENSHIFT = "azure_openshift"; - - /** Google Cloud Compute Engine (GCE). */ - public static final String GCP_COMPUTE_ENGINE = "gcp_compute_engine"; - - /** Google Cloud Run. */ - public static final String GCP_CLOUD_RUN = "gcp_cloud_run"; - - /** Google Cloud Kubernetes Engine (GKE). */ - public static final String GCP_KUBERNETES_ENGINE = "gcp_kubernetes_engine"; - - /** Google Cloud Functions (GCF). */ - public static final String GCP_CLOUD_FUNCTIONS = "gcp_cloud_functions"; - - /** Google Cloud App Engine (GAE). */ - public static final String GCP_APP_ENGINE = "gcp_app_engine"; - - /** Red Hat OpenShift on Google Cloud. */ - public static final String GCP_OPENSHIFT = "gcp_openshift"; - - /** Red Hat OpenShift on IBM Cloud. */ - public static final String IBM_CLOUD_OPENSHIFT = "ibm_cloud_openshift"; - - /** Tencent Cloud Cloud Virtual Machine (CVM). */ - public static final String TENCENT_CLOUD_CVM = "tencent_cloud_cvm"; - - /** Tencent Cloud Elastic Kubernetes Service (EKS). */ - public static final String TENCENT_CLOUD_EKS = "tencent_cloud_eks"; - - /** Tencent Cloud Serverless Cloud Function (SCF). */ - public static final String TENCENT_CLOUD_SCF = "tencent_cloud_scf"; - - private CloudPlatformValues() {} - } - - public static final class AwsEcsLaunchtypeValues { - /** ec2. */ - public static final String EC2 = "ec2"; - - /** fargate. */ - public static final String FARGATE = "fargate"; - - private AwsEcsLaunchtypeValues() {} - } - - public static final class HostArchValues { - /** AMD64. */ - public static final String AMD64 = "amd64"; - - /** ARM32. */ - public static final String ARM32 = "arm32"; - - /** ARM64. */ - public static final String ARM64 = "arm64"; - - /** Itanium. */ - public static final String IA64 = "ia64"; - - /** 32-bit PowerPC. */ - public static final String PPC32 = "ppc32"; - - /** 64-bit PowerPC. */ - public static final String PPC64 = "ppc64"; - - /** IBM z/Architecture. */ - public static final String S390X = "s390x"; - - /** 32-bit x86. */ - public static final String X86 = "x86"; - - private HostArchValues() {} - } - - public static final class OsTypeValues { - /** Microsoft Windows. */ - public static final String WINDOWS = "windows"; - - /** Linux. */ - public static final String LINUX = "linux"; - - /** Apple Darwin. */ - public static final String DARWIN = "darwin"; - - /** FreeBSD. */ - public static final String FREEBSD = "freebsd"; - - /** NetBSD. */ - public static final String NETBSD = "netbsd"; - - /** OpenBSD. 
*/ - public static final String OPENBSD = "openbsd"; - - /** DragonFly BSD. */ - public static final String DRAGONFLYBSD = "dragonflybsd"; - - /** HP-UX (Hewlett Packard Unix). */ - public static final String HPUX = "hpux"; - - /** AIX (Advanced Interactive eXecutive). */ - public static final String AIX = "aix"; - - /** SunOS, Oracle Solaris. */ - public static final String SOLARIS = "solaris"; - - /** IBM z/OS. */ - public static final String Z_OS = "z_os"; - - private OsTypeValues() {} - } - - public static final class TelemetrySdkLanguageValues { - /** cpp. */ - public static final String CPP = "cpp"; - - /** dotnet. */ - public static final String DOTNET = "dotnet"; - - /** erlang. */ - public static final String ERLANG = "erlang"; - - /** go. */ - public static final String GO = "go"; - - /** java. */ - public static final String JAVA = "java"; - - /** nodejs. */ - public static final String NODEJS = "nodejs"; - - /** php. */ - public static final String PHP = "php"; - - /** python. */ - public static final String PYTHON = "python"; - - /** ruby. */ - public static final String RUBY = "ruby"; - - /** webjs. */ - public static final String WEBJS = "webjs"; - - /** swift. */ - public static final String SWIFT = "swift"; - - private TelemetrySdkLanguageValues() {} - } - - /** - * Red Hat OpenShift on Google Cloud. - * - * @deprecated This item has been removed as of 1.18.0 of the semantic conventions. Use {@link - * ResourceAttributes#GCP_OPENSHIFT} instead. - */ - @Deprecated public static final String GCP_OPENSHIFT = "gcp_openshift"; - - /** - * Full user-agent string provided by the browser - * - *

    Notes: - * - *

      - *
    • The user-agent value SHOULD be provided only from browsers that do not have a mechanism - * to retrieve brands and platform individually from the User-Agent Client Hints API. To - * retrieve the value, the legacy {@code navigator.userAgent} API can be used. - *
    - * - * @deprecated This item has been renamed in 1.19.0 version of the semantic conventions. Use - * {@link io.opentelemetry.semconv.trace.attributes.SemanticAttributes#USER_AGENT_ORIGINAL} - * instead. - */ - @Deprecated - public static final AttributeKey BROWSER_USER_AGENT = stringKey("browser.user_agent"); - - /** - * The unique ID of the single function that this runtime instance executes. - * - *

    Notes: - * - *

      - *
    • On some cloud providers, it may not be possible to determine the full ID at startup, so - * consider setting {@code faas.id} as a span attribute instead. - *
    • The exact value to use for {@code faas.id} depends on the cloud provider: - *
    • AWS Lambda: The function ARN. - * Take care not to use the "invoked ARN" directly but replace any alias - * suffix with the resolved function version, as the same runtime instance may be - * invokable with multiple different aliases. - *
    • GCP: The URI of the resource - *
    • Azure: The Fully - * Qualified Resource ID of the invoked function, not the function app, having - * the form {@code - * /subscriptions//resourceGroups//providers/Microsoft.Web/sites//functions/}. - * This means that a span attribute MUST be used, as an Azure function app can host multiple - * functions that would usually share a TracerProvider. - *
    - * - * @deprecated This item has been removed in 1.19.0 version of the semantic conventions. Use - * {@link ResourceAttributes#CLOUD_RESOURCE_ID} instead. - */ - @Deprecated public static final AttributeKey FAAS_ID = stringKey("faas.id"); - - private ResourceAttributes() {} -} diff --git a/semconv/src/main/java/io/opentelemetry/semconv/resource/attributes/package-info.java b/semconv/src/main/java/io/opentelemetry/semconv/resource/attributes/package-info.java deleted file mode 100644 index d893dd794bf..00000000000 --- a/semconv/src/main/java/io/opentelemetry/semconv/resource/attributes/package-info.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -/** - * OpenTelemetry semantic attributes for resources. - * - * @see io.opentelemetry.semconv.resource.attributes.ResourceAttributes - */ -@ParametersAreNonnullByDefault -package io.opentelemetry.semconv.resource.attributes; - -import javax.annotation.ParametersAreNonnullByDefault; diff --git a/semconv/src/main/java/io/opentelemetry/semconv/trace/attributes/SemanticAttributes.java b/semconv/src/main/java/io/opentelemetry/semconv/trace/attributes/SemanticAttributes.java deleted file mode 100644 index d05fbf85ae1..00000000000 --- a/semconv/src/main/java/io/opentelemetry/semconv/trace/attributes/SemanticAttributes.java +++ /dev/null @@ -1,2322 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.semconv.trace.attributes; - -import static io.opentelemetry.api.common.AttributeKey.booleanKey; -import static io.opentelemetry.api.common.AttributeKey.doubleKey; -import static io.opentelemetry.api.common.AttributeKey.longKey; -import static io.opentelemetry.api.common.AttributeKey.stringArrayKey; -import static io.opentelemetry.api.common.AttributeKey.stringKey; - -import io.opentelemetry.api.common.AttributeKey; -import java.util.List; - -/** - * @deprecated Use {@code io.opentelemetry.semconv.SemanticAttributes} from io.opentelemetry.semconv:opentelemetry-semconv:{{version}} - * instead. - */ -@Deprecated -// DO NOT EDIT, this is an Auto-generated file from -// buildscripts/semantic-convention/templates/SemanticAttributes.java.j2 -@SuppressWarnings("unused") -public final class SemanticAttributes { - /** The URL of the OpenTelemetry schema for these keys and values. */ - public static final String SCHEMA_URL = "https://opentelemetry.io/schemas/1.20.0"; - - /** - * The type of the exception (its fully-qualified class name, if applicable). The dynamic type of - * the exception should be preferred over the static type in languages that support it. - */ - public static final AttributeKey EXCEPTION_TYPE = stringKey("exception.type"); - - /** The exception message. */ - public static final AttributeKey EXCEPTION_MESSAGE = stringKey("exception.message"); - - /** - * A stacktrace as a string in the natural representation for the language runtime. The - * representation is to be determined and documented by each language SIG. - */ - public static final AttributeKey EXCEPTION_STACKTRACE = stringKey("exception.stacktrace"); - - /** HTTP request method. */ - public static final AttributeKey HTTP_METHOD = stringKey("http.method"); - - /** HTTP response status code. */ - public static final AttributeKey HTTP_STATUS_CODE = longKey("http.status_code"); - - /** The URI scheme identifying the used protocol. 
*/ - public static final AttributeKey HTTP_SCHEME = stringKey("http.scheme"); - - /** - * The matched route (path template in the format used by the respective server framework). See - * note below - * - *

    Notes: - * - *

      - *
    • MUST NOT be populated when this is not supported by the HTTP server framework as the - * route attribute should have low-cardinality and the URI path can NOT substitute it. - * SHOULD include the application - * root if there is one. - *
    - */ - public static final AttributeKey HTTP_ROUTE = stringKey("http.route"); - - /** The name identifies the event. */ - public static final AttributeKey EVENT_NAME = stringKey("event.name"); - - /** - * The domain identifies the business context for the events. - * - *

    Notes: - * - *

      - *
    • Events across different domains may have same {@code event.name}, yet be unrelated - * events. - *
    - */ - public static final AttributeKey EVENT_DOMAIN = stringKey("event.domain"); - - /** - * A unique identifier for the Log Record. - * - *

    Notes: - * - *

      - *
    • If an id is provided, other log records with the same id will be considered duplicates - * and can be removed safely. This means, that two distinguishable log records MUST have - * different values. The id MAY be an Universally - * Unique Lexicographically Sortable Identifier (ULID), but other identifiers (e.g. - * UUID) may be used as needed. - *
    - */ - public static final AttributeKey LOG_RECORD_UID = stringKey("log.record.uid"); - - /** - * The full invoked ARN as provided on the {@code Context} passed to the function ({@code - * Lambda-Runtime-Invoked-Function-Arn} header on the {@code /runtime/invocation/next} - * applicable). - * - *

    Notes: - * - *

      - *
    • This may be different from {@code cloud.resource_id} if an alias is involved. - *
    - */ - public static final AttributeKey AWS_LAMBDA_INVOKED_ARN = - stringKey("aws.lambda.invoked_arn"); - - /** - * The event_id - * uniquely identifies the event. - */ - public static final AttributeKey CLOUDEVENTS_EVENT_ID = stringKey("cloudevents.event_id"); - - /** - * The source - * identifies the context in which an event happened. - */ - public static final AttributeKey CLOUDEVENTS_EVENT_SOURCE = - stringKey("cloudevents.event_source"); - - /** - * The version - * of the CloudEvents specification which the event uses. - */ - public static final AttributeKey CLOUDEVENTS_EVENT_SPEC_VERSION = - stringKey("cloudevents.event_spec_version"); - - /** - * The event_type - * contains a value describing the type of event related to the originating occurrence. - */ - public static final AttributeKey CLOUDEVENTS_EVENT_TYPE = - stringKey("cloudevents.event_type"); - - /** - * The subject - * of the event in the context of the event producer (identified by source). - */ - public static final AttributeKey CLOUDEVENTS_EVENT_SUBJECT = - stringKey("cloudevents.event_subject"); - - /** - * Parent-child Reference type - * - *

    Notes: - * - *

      - *
    • The causal relationship between a child Span and a parent Span. - *
    - */ - public static final AttributeKey OPENTRACING_REF_TYPE = stringKey("opentracing.ref_type"); - - /** - * An identifier for the database management system (DBMS) product being used. See below for a - * list of well-known identifiers. - */ - public static final AttributeKey DB_SYSTEM = stringKey("db.system"); - - /** - * The connection string used to connect to the database. It is recommended to remove embedded - * credentials. - */ - public static final AttributeKey DB_CONNECTION_STRING = stringKey("db.connection_string"); - - /** Username for accessing the database. */ - public static final AttributeKey DB_USER = stringKey("db.user"); - - /** - * The fully-qualified class name of the Java Database Connectivity - * (JDBC) driver used to connect. - */ - public static final AttributeKey DB_JDBC_DRIVER_CLASSNAME = - stringKey("db.jdbc.driver_classname"); - - /** - * This attribute is used to report the name of the database being accessed. For commands that - * switch the database, this should be set to the target database (even if the command fails). - * - *

    Notes: - * - *

      - *
    • In some SQL databases, the database name to be used is called "schema name". In - * case there are multiple layers that could be considered for database name (e.g. Oracle - * instance name and schema name), the database name to be used is the more specific layer - * (e.g. Oracle schema name). - *
    - */ - public static final AttributeKey DB_NAME = stringKey("db.name"); - - /** The database statement being executed. */ - public static final AttributeKey DB_STATEMENT = stringKey("db.statement"); - - /** - * The name of the operation being executed, e.g. the MongoDB command - * name such as {@code findAndModify}, or the SQL keyword. - * - *

    Notes: - * - *

      - *
    • When setting this to an SQL keyword, it is not recommended to attempt any client-side - * parsing of {@code db.statement} just to get this property, but it should be set if the - * operation name is provided by the library being instrumented. If the SQL statement has an - * ambiguous operation, or performs more than one operation, this value may be omitted. - *
    - */ - public static final AttributeKey DB_OPERATION = stringKey("db.operation"); - - /** - * The Microsoft SQL Server instance - * name connecting to. This name is used to determine the port of a named instance. - * - *

    Notes: - * - *

      - *
    • If setting a {@code db.mssql.instance_name}, {@code net.peer.port} is no longer required - * (but still recommended if non-standard). - *
    - */ - public static final AttributeKey DB_MSSQL_INSTANCE_NAME = - stringKey("db.mssql.instance_name"); - - /** The fetch size used for paging, i.e. how many rows will be returned at once. */ - public static final AttributeKey DB_CASSANDRA_PAGE_SIZE = longKey("db.cassandra.page_size"); - - /** - * The consistency level of the query. Based on consistency values from CQL. - */ - public static final AttributeKey DB_CASSANDRA_CONSISTENCY_LEVEL = - stringKey("db.cassandra.consistency_level"); - - /** - * The name of the primary table that the operation is acting upon, including the keyspace name - * (if applicable). - * - *

    Notes: - * - *

      - *
    • This mirrors the db.sql.table attribute but references cassandra rather than sql. It is - * not recommended to attempt any client-side parsing of {@code db.statement} just to get - * this property, but it should be set if it is provided by the library being instrumented. - * If the operation is acting upon an anonymous table, or more than one table, this value - * MUST NOT be set. - *
    - */ - public static final AttributeKey DB_CASSANDRA_TABLE = stringKey("db.cassandra.table"); - - /** Whether or not the query is idempotent. */ - public static final AttributeKey DB_CASSANDRA_IDEMPOTENCE = - booleanKey("db.cassandra.idempotence"); - - /** - * The number of times a query was speculatively executed. Not set or {@code 0} if the query was - * not executed speculatively. - */ - public static final AttributeKey DB_CASSANDRA_SPECULATIVE_EXECUTION_COUNT = - longKey("db.cassandra.speculative_execution_count"); - - /** The ID of the coordinating node for a query. */ - public static final AttributeKey DB_CASSANDRA_COORDINATOR_ID = - stringKey("db.cassandra.coordinator.id"); - - /** The data center of the coordinating node for a query. */ - public static final AttributeKey DB_CASSANDRA_COORDINATOR_DC = - stringKey("db.cassandra.coordinator.dc"); - - /** - * The index of the database being accessed as used in the {@code SELECT} command, provided as an integer. To - * be used instead of the generic {@code db.name} attribute. - */ - public static final AttributeKey DB_REDIS_DATABASE_INDEX = - longKey("db.redis.database_index"); - - /** The collection being accessed within the database stated in {@code db.name}. */ - public static final AttributeKey DB_MONGODB_COLLECTION = - stringKey("db.mongodb.collection"); - - /** - * The name of the primary table that the operation is acting upon, including the database name - * (if applicable). - * - *

    Notes: - * - *

      - *
    • It is not recommended to attempt any client-side parsing of {@code db.statement} just to - * get this property, but it should be set if it is provided by the library being - * instrumented. If the operation is acting upon an anonymous table, or more than one table, - * this value MUST NOT be set. - *
    - */ - public static final AttributeKey DB_SQL_TABLE = stringKey("db.sql.table"); - - /** Unique Cosmos client instance id. */ - public static final AttributeKey DB_COSMOSDB_CLIENT_ID = - stringKey("db.cosmosdb.client_id"); - - /** CosmosDB Operation Type. */ - public static final AttributeKey DB_COSMOSDB_OPERATION_TYPE = - stringKey("db.cosmosdb.operation_type"); - - /** Cosmos client connection mode. */ - public static final AttributeKey DB_COSMOSDB_CONNECTION_MODE = - stringKey("db.cosmosdb.connection_mode"); - - /** Cosmos DB container name. */ - public static final AttributeKey DB_COSMOSDB_CONTAINER = - stringKey("db.cosmosdb.container"); - - /** Request payload size in bytes */ - public static final AttributeKey DB_COSMOSDB_REQUEST_CONTENT_LENGTH = - longKey("db.cosmosdb.request_content_length"); - - /** Cosmos DB status code. */ - public static final AttributeKey DB_COSMOSDB_STATUS_CODE = - longKey("db.cosmosdb.status_code"); - - /** Cosmos DB sub status code. */ - public static final AttributeKey DB_COSMOSDB_SUB_STATUS_CODE = - longKey("db.cosmosdb.sub_status_code"); - - /** RU consumed for that operation */ - public static final AttributeKey DB_COSMOSDB_REQUEST_CHARGE = - doubleKey("db.cosmosdb.request_charge"); - - /** - * Name of the code, either "OK" or "ERROR". MUST NOT be set if the status - * code is UNSET. - */ - public static final AttributeKey OTEL_STATUS_CODE = stringKey("otel.status_code"); - - /** Description of the Status if it has a value, otherwise not set. */ - public static final AttributeKey OTEL_STATUS_DESCRIPTION = - stringKey("otel.status_description"); - - /** - * Type of the trigger which caused this function invocation. - * - *

    Notes: - * - *

      - *
    • For the server/consumer span on the incoming side, {@code faas.trigger} MUST be set. - *
    • Clients invoking FaaS instances usually cannot set {@code faas.trigger}, since they would - * typically need to look in the payload to determine the event type. If clients set it, it - * should be the same as the trigger that corresponding incoming would have (i.e., this has - * nothing to do with the underlying transport used to make the API call to invoke the - * lambda, which is often HTTP). - *
    - */ - public static final AttributeKey FAAS_TRIGGER = stringKey("faas.trigger"); - - /** The invocation ID of the current function invocation. */ - public static final AttributeKey FAAS_INVOCATION_ID = stringKey("faas.invocation_id"); - - /** - * The name of the source on which the triggering operation was performed. For example, in Cloud - * Storage or S3 corresponds to the bucket name, and in Cosmos DB to the database name. - */ - public static final AttributeKey FAAS_DOCUMENT_COLLECTION = - stringKey("faas.document.collection"); - - /** Describes the type of the operation that was performed on the data. */ - public static final AttributeKey FAAS_DOCUMENT_OPERATION = - stringKey("faas.document.operation"); - - /** - * A string containing the time when the data was accessed in the ISO 8601 format expressed in - * UTC. - */ - public static final AttributeKey FAAS_DOCUMENT_TIME = stringKey("faas.document.time"); - - /** - * The document name/table subjected to the operation. For example, in Cloud Storage or S3 is the - * name of the file, and in Cosmos DB the table name. - */ - public static final AttributeKey FAAS_DOCUMENT_NAME = stringKey("faas.document.name"); - - /** - * A string containing the function invocation time in the ISO 8601 format expressed in - * UTC. - */ - public static final AttributeKey FAAS_TIME = stringKey("faas.time"); - - /** - * A string containing the schedule period as Cron - * Expression. - */ - public static final AttributeKey FAAS_CRON = stringKey("faas.cron"); - - /** - * A boolean that is true if the serverless function is executed for the first time (aka - * cold-start). - */ - public static final AttributeKey FAAS_COLDSTART = booleanKey("faas.coldstart"); - - /** - * The name of the invoked function. - * - *

    Notes: - * - *

      - *
    • SHOULD be equal to the {@code faas.name} resource attribute of the invoked function. - *
    - */ - public static final AttributeKey FAAS_INVOKED_NAME = stringKey("faas.invoked_name"); - - /** - * The cloud provider of the invoked function. - * - *

    Notes: - * - *

      - *
    • SHOULD be equal to the {@code cloud.provider} resource attribute of the invoked function. - *
    - */ - public static final AttributeKey FAAS_INVOKED_PROVIDER = - stringKey("faas.invoked_provider"); - - /** - * The cloud region of the invoked function. - * - *

    Notes: - * - *

      - *
    • SHOULD be equal to the {@code cloud.region} resource attribute of the invoked function. - *
    - */ - public static final AttributeKey FAAS_INVOKED_REGION = stringKey("faas.invoked_region"); - - /** The unique identifier of the feature flag. */ - public static final AttributeKey FEATURE_FLAG_KEY = stringKey("feature_flag.key"); - - /** The name of the service provider that performs the flag evaluation. */ - public static final AttributeKey FEATURE_FLAG_PROVIDER_NAME = - stringKey("feature_flag.provider_name"); - - /** - * SHOULD be a semantic identifier for a value. If one is unavailable, a stringified version of - * the value can be used. - * - *

    Notes: - * - *

      - *
    • A semantic identifier, commonly referred to as a variant, provides a means for referring - * to a value without including the value itself. This can provide additional context for - * understanding the meaning behind a value. For example, the variant {@code red} maybe be - * used for the value {@code #c05543}. - *
    • A stringified version of the value can be used in situations where a semantic identifier - * is unavailable. String representation of the value should be determined by the - * implementer. - *
    - */ - public static final AttributeKey FEATURE_FLAG_VARIANT = stringKey("feature_flag.variant"); - - /** Transport protocol used. See note below. */ - public static final AttributeKey NET_TRANSPORT = stringKey("net.transport"); - - /** Application layer protocol used. The value SHOULD be normalized to lowercase. */ - public static final AttributeKey NET_PROTOCOL_NAME = stringKey("net.protocol.name"); - - /** - * Version of the application layer protocol used. See note below. - * - *

    Notes: - * - *

      - *
    • {@code net.protocol.version} refers to the version of the protocol used and might be - * different from the protocol client's version. If the HTTP client used has a version of - * {@code 0.27.2}, but sends HTTP version {@code 1.1}, this attribute should be set to - * {@code 1.1}. - *
    - */ - public static final AttributeKey NET_PROTOCOL_VERSION = stringKey("net.protocol.version"); - - /** Remote socket peer name. */ - public static final AttributeKey NET_SOCK_PEER_NAME = stringKey("net.sock.peer.name"); - - /** - * Remote socket peer address: IPv4 or IPv6 for internet protocols, path for local communication, - * etc. - */ - public static final AttributeKey NET_SOCK_PEER_ADDR = stringKey("net.sock.peer.addr"); - - /** Remote socket peer port. */ - public static final AttributeKey NET_SOCK_PEER_PORT = longKey("net.sock.peer.port"); - - /** - * Protocol address - * family which is used for communication. - */ - public static final AttributeKey NET_SOCK_FAMILY = stringKey("net.sock.family"); - - /** - * Logical remote hostname, see note below. - * - *

    Notes: - * - *

      - *
    • {@code net.peer.name} SHOULD NOT be set if capturing it would require an extra DNS - * lookup. - *
    - */ - public static final AttributeKey NET_PEER_NAME = stringKey("net.peer.name"); - - /** Logical remote port number */ - public static final AttributeKey NET_PEER_PORT = longKey("net.peer.port"); - - /** Logical local hostname or similar, see note below. */ - public static final AttributeKey NET_HOST_NAME = stringKey("net.host.name"); - - /** Logical local port number, preferably the one that the peer used to connect */ - public static final AttributeKey NET_HOST_PORT = longKey("net.host.port"); - - /** Local socket address. Useful in case of a multi-IP host. */ - public static final AttributeKey NET_SOCK_HOST_ADDR = stringKey("net.sock.host.addr"); - - /** Local socket port number. */ - public static final AttributeKey NET_SOCK_HOST_PORT = longKey("net.sock.host.port"); - - /** The internet connection type currently being used by the host. */ - public static final AttributeKey NET_HOST_CONNECTION_TYPE = - stringKey("net.host.connection.type"); - - /** - * This describes more details regarding the connection.type. It may be the type of cell - * technology connection, but it could be used for describing details about a wifi connection. - */ - public static final AttributeKey NET_HOST_CONNECTION_SUBTYPE = - stringKey("net.host.connection.subtype"); - - /** The name of the mobile carrier. */ - public static final AttributeKey NET_HOST_CARRIER_NAME = - stringKey("net.host.carrier.name"); - - /** The mobile carrier country code. */ - public static final AttributeKey NET_HOST_CARRIER_MCC = stringKey("net.host.carrier.mcc"); - - /** The mobile carrier network code. */ - public static final AttributeKey NET_HOST_CARRIER_MNC = stringKey("net.host.carrier.mnc"); - - /** The ISO 3166-1 alpha-2 2-character country code associated with the mobile carrier network. */ - public static final AttributeKey NET_HOST_CARRIER_ICC = stringKey("net.host.carrier.icc"); - - /** - * The {@code service.name} of - * the remote service. SHOULD be equal to the actual {@code service.name} resource attribute of - * the remote service if any. - */ - public static final AttributeKey PEER_SERVICE = stringKey("peer.service"); - - /** - * Username or client_id extracted from the access token or Authorization header in the inbound - * request from outside the system. - */ - public static final AttributeKey ENDUSER_ID = stringKey("enduser.id"); - - /** - * Actual/assumed role the client is making the request under extracted from token or application - * security context. - */ - public static final AttributeKey ENDUSER_ROLE = stringKey("enduser.role"); - - /** - * Scopes or granted authorities the client currently possesses extracted from token or - * application security context. The value would come from the scope associated with an OAuth 2.0 Access Token or an - * attribute value in a SAML - * 2.0 Assertion. - */ - public static final AttributeKey ENDUSER_SCOPE = stringKey("enduser.scope"); - - /** Current "managed" thread ID (as opposed to OS thread ID). */ - public static final AttributeKey THREAD_ID = longKey("thread.id"); - - /** Current thread name. */ - public static final AttributeKey THREAD_NAME = stringKey("thread.name"); - - /** - * The method or function name, or equivalent (usually rightmost part of the code unit's name). - */ - public static final AttributeKey CODE_FUNCTION = stringKey("code.function"); - - /** - * The "namespace" within which {@code code.function} is defined. 
Usually the qualified - * class or module name, such that {@code code.namespace} + some separator + {@code code.function} - * form a unique identifier for the code unit. - */ - public static final AttributeKey CODE_NAMESPACE = stringKey("code.namespace"); - - /** - * The source code file name that identifies the code unit as uniquely as possible (preferably an - * absolute file path). - */ - public static final AttributeKey CODE_FILEPATH = stringKey("code.filepath"); - - /** - * The line number in {@code code.filepath} best representing the operation. It SHOULD point - * within the code unit named in {@code code.function}. - */ - public static final AttributeKey CODE_LINENO = longKey("code.lineno"); - - /** - * The column number in {@code code.filepath} best representing the operation. It SHOULD point - * within the code unit named in {@code code.function}. - */ - public static final AttributeKey CODE_COLUMN = longKey("code.column"); - - /** - * The size of the request payload body in bytes. This is the number of bytes transferred - * excluding headers and is often, but not always, present as the Content-Length - * header. For requests using transport encoding, this should be the compressed size. - */ - public static final AttributeKey HTTP_REQUEST_CONTENT_LENGTH = - longKey("http.request_content_length"); - - /** - * The size of the response payload body in bytes. This is the number of bytes transferred - * excluding headers and is often, but not always, present as the Content-Length - * header. For requests using transport encoding, this should be the compressed size. - */ - public static final AttributeKey HTTP_RESPONSE_CONTENT_LENGTH = - longKey("http.response_content_length"); - - /** - * Full HTTP request URL in the form {@code scheme://host[:port]/path?query[#fragment]}. Usually - * the fragment is not transmitted over HTTP, but if it is known, it should be included - * nevertheless. - * - *

    Notes:

    • {@code http.url} MUST NOT contain credentials passed via URL in form of {@code https://username:password@www.example.com/}. In such case the attribute's value should be {@code https://www.example.com/}.
    - */ - public static final AttributeKey HTTP_URL = stringKey("http.url"); - - /** - * The ordinal number of request resending attempt (for any reason, including redirects). - * - *

    Notes:

    • The resend count SHOULD be updated each time an HTTP request gets resent by the client, regardless of what was the cause of the resending (e.g. redirection, authorization failure, 503 Server Unavailable, network issues, or any other).
    - */ - public static final AttributeKey HTTP_RESEND_COUNT = longKey("http.resend_count"); - - /** The full request target as passed in a HTTP request line or equivalent. */ - public static final AttributeKey HTTP_TARGET = stringKey("http.target"); - - /** - * The IP address of the original client behind all proxies, if known (e.g. from X-Forwarded-For). - * - *

    Notes:

    • This is not necessarily the same as {@code net.sock.peer.addr}, which would identify the network-level peer, which may be a proxy.
    • This attribute should be set when a source of information different from the one used for {@code net.sock.peer.addr} is available, even if that other source just confirms the same value as {@code net.sock.peer.addr}. Rationale: For {@code net.sock.peer.addr}, one typically does not know if it comes from a proxy, reverse proxy, or the actual client. Setting {@code http.client_ip} when it's the same as {@code net.sock.peer.addr} means that one is at least somewhat confident that the address is not that of the closest proxy.
    - */ - public static final AttributeKey HTTP_CLIENT_IP = stringKey("http.client_ip"); - - /** - * The AWS request ID as returned in the response headers {@code x-amz-request-id} or {@code - * x-amz-requestid}. - */ - public static final AttributeKey AWS_REQUEST_ID = stringKey("aws.request_id"); - - /** The keys in the {@code RequestItems} object field. */ - public static final AttributeKey> AWS_DYNAMODB_TABLE_NAMES = - stringArrayKey("aws.dynamodb.table_names"); - - /** The JSON-serialized value of each item in the {@code ConsumedCapacity} response field. */ - public static final AttributeKey> AWS_DYNAMODB_CONSUMED_CAPACITY = - stringArrayKey("aws.dynamodb.consumed_capacity"); - - /** The JSON-serialized value of the {@code ItemCollectionMetrics} response field. */ - public static final AttributeKey AWS_DYNAMODB_ITEM_COLLECTION_METRICS = - stringKey("aws.dynamodb.item_collection_metrics"); - - /** The value of the {@code ProvisionedThroughput.ReadCapacityUnits} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_PROVISIONED_READ_CAPACITY = - doubleKey("aws.dynamodb.provisioned_read_capacity"); - - /** The value of the {@code ProvisionedThroughput.WriteCapacityUnits} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_PROVISIONED_WRITE_CAPACITY = - doubleKey("aws.dynamodb.provisioned_write_capacity"); - - /** The value of the {@code ConsistentRead} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_CONSISTENT_READ = - booleanKey("aws.dynamodb.consistent_read"); - - /** The value of the {@code ProjectionExpression} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_PROJECTION = - stringKey("aws.dynamodb.projection"); - - /** The value of the {@code Limit} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_LIMIT = longKey("aws.dynamodb.limit"); - - /** The value of the {@code AttributesToGet} request parameter. */ - public static final AttributeKey> AWS_DYNAMODB_ATTRIBUTES_TO_GET = - stringArrayKey("aws.dynamodb.attributes_to_get"); - - /** The value of the {@code IndexName} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_INDEX_NAME = - stringKey("aws.dynamodb.index_name"); - - /** The value of the {@code Select} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_SELECT = stringKey("aws.dynamodb.select"); - - /** The JSON-serialized value of each item of the {@code GlobalSecondaryIndexes} request field */ - public static final AttributeKey> AWS_DYNAMODB_GLOBAL_SECONDARY_INDEXES = - stringArrayKey("aws.dynamodb.global_secondary_indexes"); - - /** The JSON-serialized value of each item of the {@code LocalSecondaryIndexes} request field. */ - public static final AttributeKey> AWS_DYNAMODB_LOCAL_SECONDARY_INDEXES = - stringArrayKey("aws.dynamodb.local_secondary_indexes"); - - /** The value of the {@code ExclusiveStartTableName} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_EXCLUSIVE_START_TABLE = - stringKey("aws.dynamodb.exclusive_start_table"); - - /** The the number of items in the {@code TableNames} response parameter. */ - public static final AttributeKey AWS_DYNAMODB_TABLE_COUNT = - longKey("aws.dynamodb.table_count"); - - /** The value of the {@code ScanIndexForward} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_SCAN_FORWARD = - booleanKey("aws.dynamodb.scan_forward"); - - /** The value of the {@code Segment} request parameter. 
*/ - public static final AttributeKey AWS_DYNAMODB_SEGMENT = longKey("aws.dynamodb.segment"); - - /** The value of the {@code TotalSegments} request parameter. */ - public static final AttributeKey AWS_DYNAMODB_TOTAL_SEGMENTS = - longKey("aws.dynamodb.total_segments"); - - /** The value of the {@code Count} response parameter. */ - public static final AttributeKey AWS_DYNAMODB_COUNT = longKey("aws.dynamodb.count"); - - /** The value of the {@code ScannedCount} response parameter. */ - public static final AttributeKey AWS_DYNAMODB_SCANNED_COUNT = - longKey("aws.dynamodb.scanned_count"); - - /** The JSON-serialized value of each item in the {@code AttributeDefinitions} request field. */ - public static final AttributeKey> AWS_DYNAMODB_ATTRIBUTE_DEFINITIONS = - stringArrayKey("aws.dynamodb.attribute_definitions"); - - /** - * The JSON-serialized value of each item in the the {@code GlobalSecondaryIndexUpdates} request - * field. - */ - public static final AttributeKey> AWS_DYNAMODB_GLOBAL_SECONDARY_INDEX_UPDATES = - stringArrayKey("aws.dynamodb.global_secondary_index_updates"); - - /** - * The S3 bucket name the request refers to. Corresponds to the {@code --bucket} parameter of the - * S3 API - * operations. - * - *

    Notes:

    • The {@code bucket} attribute is applicable to all S3 operations that reference a bucket, i.e. that require the bucket name as a mandatory parameter. This applies to almost all S3 operations except {@code list-buckets}.
    - */ - public static final AttributeKey AWS_S3_BUCKET = stringKey("aws.s3.bucket"); - - /** - * The S3 object key the request refers to. Corresponds to the {@code --key} parameter of the S3 API operations. - * - *

    Notes:

    - */ - public static final AttributeKey AWS_S3_KEY = stringKey("aws.s3.key"); - - /** - * The source object (in the form {@code bucket}/{@code key}) for the copy operation. - * - *

    Notes:

    - */ - public static final AttributeKey AWS_S3_COPY_SOURCE = stringKey("aws.s3.copy_source"); - - /** - * Upload ID that identifies the multipart upload. - * - *

    Notes:

    - */ - public static final AttributeKey AWS_S3_UPLOAD_ID = stringKey("aws.s3.upload_id"); - - /** - * The delete request container that specifies the objects to be deleted. - * - *

    Notes:

    - */ - public static final AttributeKey AWS_S3_DELETE = stringKey("aws.s3.delete"); - - /** - * The part number of the part being uploaded in a multipart-upload operation. This is a positive - * integer between 1 and 10,000. - * - *

    Notes:

    - */ - public static final AttributeKey AWS_S3_PART_NUMBER = longKey("aws.s3.part_number"); - - /** The name of the operation being executed. */ - public static final AttributeKey GRAPHQL_OPERATION_NAME = - stringKey("graphql.operation.name"); - - /** The type of the operation being executed. */ - public static final AttributeKey GRAPHQL_OPERATION_TYPE = - stringKey("graphql.operation.type"); - - /** - * The GraphQL document being executed. - * - *

    Notes:

    • The value may be sanitized to exclude sensitive information.
    - */ - public static final AttributeKey GRAPHQL_DOCUMENT = stringKey("graphql.document"); - - /** - * A value used by the messaging system as an identifier for the message, represented as a string. - */ - public static final AttributeKey MESSAGING_MESSAGE_ID = stringKey("messaging.message.id"); - - /** - * The conversation ID identifying the conversation to which the - * message belongs, represented as a string. Sometimes called "Correlation ID". - */ - public static final AttributeKey MESSAGING_MESSAGE_CONVERSATION_ID = - stringKey("messaging.message.conversation_id"); - - /** - * The (uncompressed) size of the message payload in bytes. Also use this attribute if it is - * unknown whether the compressed or uncompressed payload size is reported. - */ - public static final AttributeKey MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES = - longKey("messaging.message.payload_size_bytes"); - - /** The compressed size of the message payload in bytes. */ - public static final AttributeKey MESSAGING_MESSAGE_PAYLOAD_COMPRESSED_SIZE_BYTES = - longKey("messaging.message.payload_compressed_size_bytes"); - - /** - * The message destination name - * - *

    Notes:

    • Destination name SHOULD uniquely identify a specific queue, topic or other entity within the broker. If the broker does not have such notion, the destination name SHOULD uniquely identify the broker.
    - */ - public static final AttributeKey MESSAGING_DESTINATION_NAME = - stringKey("messaging.destination.name"); - - /** - * Low cardinality representation of the messaging destination name - * - *

    Notes:

    • Destination names could be constructed from templates. An example would be a destination name involving a user name or product id. Although the destination name in this case is of high cardinality, the underlying template is of low cardinality and can be effectively used for grouping and aggregation.
    - */ - public static final AttributeKey MESSAGING_DESTINATION_TEMPLATE = - stringKey("messaging.destination.template"); - - /** - * A boolean that is true if the message destination is temporary and might not exist anymore - * after messages are processed. - */ - public static final AttributeKey MESSAGING_DESTINATION_TEMPORARY = - booleanKey("messaging.destination.temporary"); - - /** - * A boolean that is true if the message destination is anonymous (could be unnamed or have - * auto-generated name). - */ - public static final AttributeKey MESSAGING_DESTINATION_ANONYMOUS = - booleanKey("messaging.destination.anonymous"); - - /** - * The message source name - * - *

    Notes:

    • Source name SHOULD uniquely identify a specific queue, topic, or other entity within the broker. If the broker does not have such notion, the source name SHOULD uniquely identify the broker.
    - */ - public static final AttributeKey MESSAGING_SOURCE_NAME = - stringKey("messaging.source.name"); - - /** - * Low cardinality representation of the messaging source name - * - *

    Notes:

    • Source names could be constructed from templates. An example would be a source name involving a user name or product id. Although the source name in this case is of high cardinality, the underlying template is of low cardinality and can be effectively used for grouping and aggregation.
    - */ - public static final AttributeKey MESSAGING_SOURCE_TEMPLATE = - stringKey("messaging.source.template"); - - /** - * A boolean that is true if the message source is temporary and might not exist anymore after - * messages are processed. - */ - public static final AttributeKey MESSAGING_SOURCE_TEMPORARY = - booleanKey("messaging.source.temporary"); - - /** - * A boolean that is true if the message source is anonymous (could be unnamed or have - * auto-generated name). - */ - public static final AttributeKey MESSAGING_SOURCE_ANONYMOUS = - booleanKey("messaging.source.anonymous"); - - /** A string identifying the messaging system. */ - public static final AttributeKey MESSAGING_SYSTEM = stringKey("messaging.system"); - - /** - * A string identifying the kind of messaging operation as defined in the Operation names section above. - * - *

    Notes:

    • If a custom value is used, it MUST be of low cardinality.
    - */ - public static final AttributeKey MESSAGING_OPERATION = stringKey("messaging.operation"); - - /** - * The number of messages sent, received, or processed in the scope of the batching operation. - * - *

    Notes:

    • Instrumentations SHOULD NOT set {@code messaging.batch.message_count} on spans that operate with a single message. When a messaging client library supports both batch and single-message API for the same operation, instrumentations SHOULD use {@code messaging.batch.message_count} for batching APIs and SHOULD NOT use it for single-message APIs.
    - */ - public static final AttributeKey MESSAGING_BATCH_MESSAGE_COUNT = - longKey("messaging.batch.message_count"); - - /** - * The identifier for the consumer receiving a message. For Kafka, set it to {@code - * {messaging.kafka.consumer.group} - {messaging.kafka.client_id}}, if both are present, or only - * {@code messaging.kafka.consumer.group}. For brokers, such as RabbitMQ and Artemis, set it to - * the {@code client_id} of the client consuming the message. - */ - public static final AttributeKey MESSAGING_CONSUMER_ID = - stringKey("messaging.consumer.id"); - - /** RabbitMQ message routing key. */ - public static final AttributeKey MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY = - stringKey("messaging.rabbitmq.destination.routing_key"); - - /** - * Message keys in Kafka are used for grouping alike messages to ensure they're processed on the - * same partition. They differ from {@code messaging.message.id} in that they're not unique. If - * the key is {@code null}, the attribute MUST NOT be set. - * - *

    Notes:

    • If the key type is not string, its string representation has to be supplied for the attribute. If the key has no unambiguous, canonical string form, don't include its value.
    - */ - public static final AttributeKey MESSAGING_KAFKA_MESSAGE_KEY = - stringKey("messaging.kafka.message.key"); - - /** - * Name of the Kafka Consumer Group that is handling the message. Only applies to consumers, not - * producers. - */ - public static final AttributeKey MESSAGING_KAFKA_CONSUMER_GROUP = - stringKey("messaging.kafka.consumer.group"); - - /** Client Id for the Consumer or Producer that is handling the message. */ - public static final AttributeKey MESSAGING_KAFKA_CLIENT_ID = - stringKey("messaging.kafka.client_id"); - - /** Partition the message is sent to. */ - public static final AttributeKey MESSAGING_KAFKA_DESTINATION_PARTITION = - longKey("messaging.kafka.destination.partition"); - - /** Partition the message is received from. */ - public static final AttributeKey MESSAGING_KAFKA_SOURCE_PARTITION = - longKey("messaging.kafka.source.partition"); - - /** The offset of a record in the corresponding Kafka partition. */ - public static final AttributeKey MESSAGING_KAFKA_MESSAGE_OFFSET = - longKey("messaging.kafka.message.offset"); - - /** A boolean that is true if the message is a tombstone. */ - public static final AttributeKey MESSAGING_KAFKA_MESSAGE_TOMBSTONE = - booleanKey("messaging.kafka.message.tombstone"); - - /** Namespace of RocketMQ resources, resources in different namespaces are individual. */ - public static final AttributeKey MESSAGING_ROCKETMQ_NAMESPACE = - stringKey("messaging.rocketmq.namespace"); - - /** - * Name of the RocketMQ producer/consumer group that is handling the message. The client type is - * identified by the SpanKind. - */ - public static final AttributeKey MESSAGING_ROCKETMQ_CLIENT_GROUP = - stringKey("messaging.rocketmq.client_group"); - - /** The unique identifier for each client. */ - public static final AttributeKey MESSAGING_ROCKETMQ_CLIENT_ID = - stringKey("messaging.rocketmq.client_id"); - - /** - * The timestamp in milliseconds that the delay message is expected to be delivered to consumer. - */ - public static final AttributeKey MESSAGING_ROCKETMQ_MESSAGE_DELIVERY_TIMESTAMP = - longKey("messaging.rocketmq.message.delivery_timestamp"); - - /** The delay time level for delay message, which determines the message delay time. */ - public static final AttributeKey MESSAGING_ROCKETMQ_MESSAGE_DELAY_TIME_LEVEL = - longKey("messaging.rocketmq.message.delay_time_level"); - - /** - * It is essential for FIFO message. Messages that belong to the same message group are always - * processed one by one within the same consumer group. - */ - public static final AttributeKey MESSAGING_ROCKETMQ_MESSAGE_GROUP = - stringKey("messaging.rocketmq.message.group"); - - /** Type of message. */ - public static final AttributeKey MESSAGING_ROCKETMQ_MESSAGE_TYPE = - stringKey("messaging.rocketmq.message.type"); - - /** The secondary classifier of message besides topic. */ - public static final AttributeKey MESSAGING_ROCKETMQ_MESSAGE_TAG = - stringKey("messaging.rocketmq.message.tag"); - - /** Key(s) of message, another way to mark message besides message id. */ - public static final AttributeKey> MESSAGING_ROCKETMQ_MESSAGE_KEYS = - stringArrayKey("messaging.rocketmq.message.keys"); - - /** Model of message consumption. This only applies to consumer spans. */ - public static final AttributeKey MESSAGING_ROCKETMQ_CONSUMPTION_MODEL = - stringKey("messaging.rocketmq.consumption_model"); - - /** A string identifying the remoting system. See below for a list of well-known identifiers. 
*/ - public static final AttributeKey RPC_SYSTEM = stringKey("rpc.system"); - - /** - * The full (logical) name of the service being called, including its package name, if applicable. - * - *

    Notes:

    • This is the logical name of the service from the RPC interface perspective, which can be different from the name of any implementing class. The {@code code.namespace} attribute may be used to store the latter (despite the attribute name, it may include a class name; e.g., class with method actually executing the call on the server side, RPC client stub class on the client side).
    - */ - public static final AttributeKey RPC_SERVICE = stringKey("rpc.service"); - - /** - * The name of the (logical) method being called, must be equal to the $method part in the span - * name. - * - *

    Notes:

    • This is the logical name of the method from the RPC interface perspective, which can be different from the name of any implementing method/function. The {@code code.function} attribute may be used to store the latter (e.g., method actually executing the call on the server side, RPC client stub method on the client side).
    - */ - public static final AttributeKey RPC_METHOD = stringKey("rpc.method"); - - /** - * The numeric status - * code of the gRPC request. - */ - public static final AttributeKey RPC_GRPC_STATUS_CODE = longKey("rpc.grpc.status_code"); - - /** - * Protocol version as in {@code jsonrpc} property of request/response. Since JSON-RPC 1.0 does - * not specify this, the value can be omitted. - */ - public static final AttributeKey RPC_JSONRPC_VERSION = stringKey("rpc.jsonrpc.version"); - - /** - * {@code id} property of request or response. Since protocol allows id to be int, string, {@code - * null} or missing (for notifications), value is expected to be cast to string for simplicity. - * Use empty string in case of {@code null} value. Omit entirely if this is a notification. - */ - public static final AttributeKey RPC_JSONRPC_REQUEST_ID = - stringKey("rpc.jsonrpc.request_id"); - - /** {@code error.code} property of response if it is an error response. */ - public static final AttributeKey RPC_JSONRPC_ERROR_CODE = longKey("rpc.jsonrpc.error_code"); - - /** {@code error.message} property of response if it is an error response. */ - public static final AttributeKey RPC_JSONRPC_ERROR_MESSAGE = - stringKey("rpc.jsonrpc.error_message"); - - /** Whether this is a received or sent message. */ - public static final AttributeKey MESSAGE_TYPE = stringKey("message.type"); - - /** - * MUST be calculated as two different counters starting from {@code 1} one for sent messages and - * one for received message. - * - *

    Notes:

    • This way we guarantee that the values will be consistent between different implementations.
    - */ - public static final AttributeKey MESSAGE_ID = longKey("message.id"); - - /** Compressed size of the message in bytes. */ - public static final AttributeKey MESSAGE_COMPRESSED_SIZE = - longKey("message.compressed_size"); - - /** Uncompressed size of the message in bytes. */ - public static final AttributeKey MESSAGE_UNCOMPRESSED_SIZE = - longKey("message.uncompressed_size"); - - /** - * The error codes of the Connect - * request. Error codes are always string values. - */ - public static final AttributeKey RPC_CONNECT_RPC_ERROR_CODE = - stringKey("rpc.connect_rpc.error_code"); - - /** - * SHOULD be set to true if the exception event is recorded at a point where it is known that the - * exception is escaping the scope of the span. - * - *

    Notes:

    • An exception is considered to have escaped (or left) the scope of a span, if that span is ended while the exception is still logically "in flight". This may be actually "in flight" in some languages (e.g. if the exception is passed to a Context manager's {@code __exit__} method in Python) but will usually be caught at the point of recording the exception in most languages.
    • It is usually not possible to determine at the point where an exception is thrown whether it will escape the scope of a span. However, it is trivial to know that an exception will escape, if one checks for an active exception just before ending the span, as done in the example above.
    • It follows that an exception may still escape the scope of the span even if the {@code exception.escaped} attribute was not set or set to false, since the event might have been recorded at a time where it was not clear whether the exception will escape.
    - */ - public static final AttributeKey EXCEPTION_ESCAPED = booleanKey("exception.escaped"); - - /** - * Value of the HTTP - * User-Agent header sent by the client. - */ - public static final AttributeKey USER_AGENT_ORIGINAL = stringKey("user_agent.original"); - - // Enum definitions - public static final class EventDomainValues { - /** Events from browser apps. */ - public static final String BROWSER = "browser"; - - /** Events from mobile apps. */ - public static final String DEVICE = "device"; - - /** Events from Kubernetes. */ - public static final String K8S = "k8s"; - - private EventDomainValues() {} - } - - public static final class OpentracingRefTypeValues { - /** The parent Span depends on the child Span in some capacity. */ - public static final String CHILD_OF = "child_of"; - - /** The parent Span does not depend in any way on the result of the child Span. */ - public static final String FOLLOWS_FROM = "follows_from"; - - private OpentracingRefTypeValues() {} - } - - public static final class DbSystemValues { - /** Some other SQL database. Fallback only. See notes. */ - public static final String OTHER_SQL = "other_sql"; - - /** Microsoft SQL Server. */ - public static final String MSSQL = "mssql"; - - /** Microsoft SQL Server Compact. */ - public static final String MSSQLCOMPACT = "mssqlcompact"; - - /** MySQL. */ - public static final String MYSQL = "mysql"; - - /** Oracle Database. */ - public static final String ORACLE = "oracle"; - - /** IBM Db2. */ - public static final String DB2 = "db2"; - - /** PostgreSQL. */ - public static final String POSTGRESQL = "postgresql"; - - /** Amazon Redshift. */ - public static final String REDSHIFT = "redshift"; - - /** Apache Hive. */ - public static final String HIVE = "hive"; - - /** Cloudscape. */ - public static final String CLOUDSCAPE = "cloudscape"; - - /** HyperSQL DataBase. */ - public static final String HSQLDB = "hsqldb"; - - /** Progress Database. */ - public static final String PROGRESS = "progress"; - - /** SAP MaxDB. */ - public static final String MAXDB = "maxdb"; - - /** SAP HANA. */ - public static final String HANADB = "hanadb"; - - /** Ingres. */ - public static final String INGRES = "ingres"; - - /** FirstSQL. */ - public static final String FIRSTSQL = "firstsql"; - - /** EnterpriseDB. */ - public static final String EDB = "edb"; - - /** InterSystems Caché. */ - public static final String CACHE = "cache"; - - /** Adabas (Adaptable Database System). */ - public static final String ADABAS = "adabas"; - - /** Firebird. */ - public static final String FIREBIRD = "firebird"; - - /** Apache Derby. */ - public static final String DERBY = "derby"; - - /** FileMaker. */ - public static final String FILEMAKER = "filemaker"; - - /** Informix. */ - public static final String INFORMIX = "informix"; - - /** InstantDB. */ - public static final String INSTANTDB = "instantdb"; - - /** InterBase. */ - public static final String INTERBASE = "interbase"; - - /** MariaDB. */ - public static final String MARIADB = "mariadb"; - - /** Netezza. */ - public static final String NETEZZA = "netezza"; - - /** Pervasive PSQL. */ - public static final String PERVASIVE = "pervasive"; - - /** PointBase. */ - public static final String POINTBASE = "pointbase"; - - /** SQLite. */ - public static final String SQLITE = "sqlite"; - - /** Sybase. */ - public static final String SYBASE = "sybase"; - - /** Teradata. */ - public static final String TERADATA = "teradata"; - - /** Vertica. */ - public static final String VERTICA = "vertica"; - - /** H2. 
*/ - public static final String H2 = "h2"; - - /** ColdFusion IMQ. */ - public static final String COLDFUSION = "coldfusion"; - - /** Apache Cassandra. */ - public static final String CASSANDRA = "cassandra"; - - /** Apache HBase. */ - public static final String HBASE = "hbase"; - - /** MongoDB. */ - public static final String MONGODB = "mongodb"; - - /** Redis. */ - public static final String REDIS = "redis"; - - /** Couchbase. */ - public static final String COUCHBASE = "couchbase"; - - /** CouchDB. */ - public static final String COUCHDB = "couchdb"; - - /** Microsoft Azure Cosmos DB. */ - public static final String COSMOSDB = "cosmosdb"; - - /** Amazon DynamoDB. */ - public static final String DYNAMODB = "dynamodb"; - - /** Neo4j. */ - public static final String NEO4J = "neo4j"; - - /** Apache Geode. */ - public static final String GEODE = "geode"; - - /** Elasticsearch. */ - public static final String ELASTICSEARCH = "elasticsearch"; - - /** Memcached. */ - public static final String MEMCACHED = "memcached"; - - /** CockroachDB. */ - public static final String COCKROACHDB = "cockroachdb"; - - /** OpenSearch. */ - public static final String OPENSEARCH = "opensearch"; - - /** ClickHouse. */ - public static final String CLICKHOUSE = "clickhouse"; - - /** Cloud Spanner. */ - public static final String SPANNER = "spanner"; - - /** Trino. */ - public static final String TRINO = "trino"; - - private DbSystemValues() {} - } - - public static final class DbCassandraConsistencyLevelValues { - /** all. */ - public static final String ALL = "all"; - - /** each_quorum. */ - public static final String EACH_QUORUM = "each_quorum"; - - /** quorum. */ - public static final String QUORUM = "quorum"; - - /** local_quorum. */ - public static final String LOCAL_QUORUM = "local_quorum"; - - /** one. */ - public static final String ONE = "one"; - - /** two. */ - public static final String TWO = "two"; - - /** three. */ - public static final String THREE = "three"; - - /** local_one. */ - public static final String LOCAL_ONE = "local_one"; - - /** any. */ - public static final String ANY = "any"; - - /** serial. */ - public static final String SERIAL = "serial"; - - /** local_serial. */ - public static final String LOCAL_SERIAL = "local_serial"; - - private DbCassandraConsistencyLevelValues() {} - } - - public static final class DbCosmosdbOperationTypeValues { - /** invalid. */ - public static final String INVALID = "Invalid"; - - /** create. */ - public static final String CREATE = "Create"; - - /** patch. */ - public static final String PATCH = "Patch"; - - /** read. */ - public static final String READ = "Read"; - - /** read_feed. */ - public static final String READ_FEED = "ReadFeed"; - - /** delete. */ - public static final String DELETE = "Delete"; - - /** replace. */ - public static final String REPLACE = "Replace"; - - /** execute. */ - public static final String EXECUTE = "Execute"; - - /** query. */ - public static final String QUERY = "Query"; - - /** head. */ - public static final String HEAD = "Head"; - - /** head_feed. */ - public static final String HEAD_FEED = "HeadFeed"; - - /** upsert. */ - public static final String UPSERT = "Upsert"; - - /** batch. */ - public static final String BATCH = "Batch"; - - /** query_plan. */ - public static final String QUERY_PLAN = "QueryPlan"; - - /** execute_javascript. 
*/ - public static final String EXECUTE_JAVASCRIPT = "ExecuteJavaScript"; - - private DbCosmosdbOperationTypeValues() {} - } - - public static final class DbCosmosdbConnectionModeValues { - /** Gateway (HTTP) connections mode. */ - public static final String GATEWAY = "gateway"; - - /** Direct connection. */ - public static final String DIRECT = "direct"; - - private DbCosmosdbConnectionModeValues() {} - } - - public static final class OtelStatusCodeValues { - /** - * The operation has been validated by an Application developer or Operator to have completed - * successfully. - */ - public static final String OK = "OK"; - - /** The operation contains an error. */ - public static final String ERROR = "ERROR"; - - private OtelStatusCodeValues() {} - } - - public static final class FaasTriggerValues { - /** A response to some data source operation such as a database or filesystem read/write. */ - public static final String DATASOURCE = "datasource"; - - /** To provide an answer to an inbound HTTP request. */ - public static final String HTTP = "http"; - - /** A function is set to be executed when messages are sent to a messaging system. */ - public static final String PUBSUB = "pubsub"; - - /** A function is scheduled to be executed regularly. */ - public static final String TIMER = "timer"; - - /** If none of the others apply. */ - public static final String OTHER = "other"; - - private FaasTriggerValues() {} - } - - public static final class FaasDocumentOperationValues { - /** When a new object is created. */ - public static final String INSERT = "insert"; - - /** When an object is modified. */ - public static final String EDIT = "edit"; - - /** When an object is deleted. */ - public static final String DELETE = "delete"; - - private FaasDocumentOperationValues() {} - } - - public static final class FaasInvokedProviderValues { - /** Alibaba Cloud. */ - public static final String ALIBABA_CLOUD = "alibaba_cloud"; - - /** Amazon Web Services. */ - public static final String AWS = "aws"; - - /** Microsoft Azure. */ - public static final String AZURE = "azure"; - - /** Google Cloud Platform. */ - public static final String GCP = "gcp"; - - /** Tencent Cloud. */ - public static final String TENCENT_CLOUD = "tencent_cloud"; - - private FaasInvokedProviderValues() {} - } - - public static final class NetTransportValues { - /** ip_tcp. */ - public static final String IP_TCP = "ip_tcp"; - - /** ip_udp. */ - public static final String IP_UDP = "ip_udp"; - - /** Named or anonymous pipe. See note below. */ - public static final String PIPE = "pipe"; - - /** In-process communication. */ - public static final String INPROC = "inproc"; - - /** Something else (non IP-based). */ - public static final String OTHER = "other"; - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. - */ - @Deprecated public static final String IP = "ip"; - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. - */ - @Deprecated public static final String UNIX = "unix"; - - private NetTransportValues() {} - } - - public static final class NetSockFamilyValues { - /** IPv4 address. */ - public static final String INET = "inet"; - - /** IPv6 address. */ - public static final String INET6 = "inet6"; - - /** Unix domain socket path. */ - public static final String UNIX = "unix"; - - private NetSockFamilyValues() {} - } - - public static final class NetHostConnectionTypeValues { - /** wifi. */ - public static final String WIFI = "wifi"; - - /** wired. 
*/ - public static final String WIRED = "wired"; - - /** cell. */ - public static final String CELL = "cell"; - - /** unavailable. */ - public static final String UNAVAILABLE = "unavailable"; - - /** unknown. */ - public static final String UNKNOWN = "unknown"; - - private NetHostConnectionTypeValues() {} - } - - public static final class NetHostConnectionSubtypeValues { - /** GPRS. */ - public static final String GPRS = "gprs"; - - /** EDGE. */ - public static final String EDGE = "edge"; - - /** UMTS. */ - public static final String UMTS = "umts"; - - /** CDMA. */ - public static final String CDMA = "cdma"; - - /** EVDO Rel. 0. */ - public static final String EVDO_0 = "evdo_0"; - - /** EVDO Rev. A. */ - public static final String EVDO_A = "evdo_a"; - - /** CDMA2000 1XRTT. */ - public static final String CDMA2000_1XRTT = "cdma2000_1xrtt"; - - /** HSDPA. */ - public static final String HSDPA = "hsdpa"; - - /** HSUPA. */ - public static final String HSUPA = "hsupa"; - - /** HSPA. */ - public static final String HSPA = "hspa"; - - /** IDEN. */ - public static final String IDEN = "iden"; - - /** EVDO Rev. B. */ - public static final String EVDO_B = "evdo_b"; - - /** LTE. */ - public static final String LTE = "lte"; - - /** EHRPD. */ - public static final String EHRPD = "ehrpd"; - - /** HSPAP. */ - public static final String HSPAP = "hspap"; - - /** GSM. */ - public static final String GSM = "gsm"; - - /** TD-SCDMA. */ - public static final String TD_SCDMA = "td_scdma"; - - /** IWLAN. */ - public static final String IWLAN = "iwlan"; - - /** 5G NR (New Radio). */ - public static final String NR = "nr"; - - /** 5G NRNSA (New Radio Non-Standalone). */ - public static final String NRNSA = "nrnsa"; - - /** LTE CA. */ - public static final String LTE_CA = "lte_ca"; - - private NetHostConnectionSubtypeValues() {} - } - - public static final class GraphqlOperationTypeValues { - /** GraphQL query. */ - public static final String QUERY = "query"; - - /** GraphQL mutation. */ - public static final String MUTATION = "mutation"; - - /** GraphQL subscription. */ - public static final String SUBSCRIPTION = "subscription"; - - private GraphqlOperationTypeValues() {} - } - - public static final class MessagingOperationValues { - /** publish. */ - public static final String PUBLISH = "publish"; - - /** receive. */ - public static final String RECEIVE = "receive"; - - /** process. */ - public static final String PROCESS = "process"; - - private MessagingOperationValues() {} - } - - public static final class MessagingRocketmqMessageTypeValues { - /** Normal message. */ - public static final String NORMAL = "normal"; - - /** FIFO message. */ - public static final String FIFO = "fifo"; - - /** Delay message. */ - public static final String DELAY = "delay"; - - /** Transaction message. */ - public static final String TRANSACTION = "transaction"; - - private MessagingRocketmqMessageTypeValues() {} - } - - public static final class MessagingRocketmqConsumptionModelValues { - /** Clustering consumption model. */ - public static final String CLUSTERING = "clustering"; - - /** Broadcasting consumption model. */ - public static final String BROADCASTING = "broadcasting"; - - private MessagingRocketmqConsumptionModelValues() {} - } - - public static final class RpcSystemValues { - /** gRPC. */ - public static final String GRPC = "grpc"; - - /** Java RMI. */ - public static final String JAVA_RMI = "java_rmi"; - - /** .NET WCF. */ - public static final String DOTNET_WCF = "dotnet_wcf"; - - /** Apache Dubbo. 
*/ - public static final String APACHE_DUBBO = "apache_dubbo"; - - /** Connect RPC. */ - public static final String CONNECT_RPC = "connect_rpc"; - - private RpcSystemValues() {} - } - - public static final class RpcGrpcStatusCodeValues { - /** OK. */ - public static final long OK = 0; - - /** CANCELLED. */ - public static final long CANCELLED = 1; - - /** UNKNOWN. */ - public static final long UNKNOWN = 2; - - /** INVALID_ARGUMENT. */ - public static final long INVALID_ARGUMENT = 3; - - /** DEADLINE_EXCEEDED. */ - public static final long DEADLINE_EXCEEDED = 4; - - /** NOT_FOUND. */ - public static final long NOT_FOUND = 5; - - /** ALREADY_EXISTS. */ - public static final long ALREADY_EXISTS = 6; - - /** PERMISSION_DENIED. */ - public static final long PERMISSION_DENIED = 7; - - /** RESOURCE_EXHAUSTED. */ - public static final long RESOURCE_EXHAUSTED = 8; - - /** FAILED_PRECONDITION. */ - public static final long FAILED_PRECONDITION = 9; - - /** ABORTED. */ - public static final long ABORTED = 10; - - /** OUT_OF_RANGE. */ - public static final long OUT_OF_RANGE = 11; - - /** UNIMPLEMENTED. */ - public static final long UNIMPLEMENTED = 12; - - /** INTERNAL. */ - public static final long INTERNAL = 13; - - /** UNAVAILABLE. */ - public static final long UNAVAILABLE = 14; - - /** DATA_LOSS. */ - public static final long DATA_LOSS = 15; - - /** UNAUTHENTICATED. */ - public static final long UNAUTHENTICATED = 16; - - private RpcGrpcStatusCodeValues() {} - } - - public static final class MessageTypeValues { - /** sent. */ - public static final String SENT = "SENT"; - - /** received. */ - public static final String RECEIVED = "RECEIVED"; - - private MessageTypeValues() {} - } - - public static final class RpcConnectRpcErrorCodeValues { - /** cancelled. */ - public static final String CANCELLED = "cancelled"; - - /** unknown. */ - public static final String UNKNOWN = "unknown"; - - /** invalid_argument. */ - public static final String INVALID_ARGUMENT = "invalid_argument"; - - /** deadline_exceeded. */ - public static final String DEADLINE_EXCEEDED = "deadline_exceeded"; - - /** not_found. */ - public static final String NOT_FOUND = "not_found"; - - /** already_exists. */ - public static final String ALREADY_EXISTS = "already_exists"; - - /** permission_denied. */ - public static final String PERMISSION_DENIED = "permission_denied"; - - /** resource_exhausted. */ - public static final String RESOURCE_EXHAUSTED = "resource_exhausted"; - - /** failed_precondition. */ - public static final String FAILED_PRECONDITION = "failed_precondition"; - - /** aborted. */ - public static final String ABORTED = "aborted"; - - /** out_of_range. */ - public static final String OUT_OF_RANGE = "out_of_range"; - - /** unimplemented. */ - public static final String UNIMPLEMENTED = "unimplemented"; - - /** internal. */ - public static final String INTERNAL = "internal"; - - /** unavailable. */ - public static final String UNAVAILABLE = "unavailable"; - - /** data_loss. */ - public static final String DATA_LOSS = "data_loss"; - - /** unauthenticated. */ - public static final String UNAUTHENTICATED = "unauthenticated"; - - private RpcConnectRpcErrorCodeValues() {} - } - - // Manually defined and not YET in the YAML - /** - * The name of an event describing an exception. - * - *

    Typically an event with that name should not be manually created. Instead {@link - * io.opentelemetry.api.trace.Span#recordException(Throwable)} should be used. - */ - public static final String EXCEPTION_EVENT_NAME = "exception"; - - /** - * The name of the keyspace being accessed. - * - * @deprecated this item has been removed as of 1.8.0 of the semantic conventions. Please use - * {@link SemanticAttributes#DB_NAME} instead. - */ - @Deprecated - public static final AttributeKey DB_CASSANDRA_KEYSPACE = - stringKey("db.cassandra.keyspace"); - - /** - * The HBase namespace being accessed. - * - * @deprecated this item has been removed as of 1.8.0 of the semantic conventions. Please use - * {@link SemanticAttributes#DB_NAME} instead. - */ - @Deprecated - public static final AttributeKey DB_HBASE_NAMESPACE = stringKey("db.hbase.namespace"); - - /** - * The size of the uncompressed request payload body after transport decoding. Not set if - * transport encoding not used. - * - * @deprecated this item has been removed as of 1.13.0 of the semantic conventions. Please use - * {@link SemanticAttributes#HTTP_REQUEST_CONTENT_LENGTH} instead. - */ - @Deprecated - public static final AttributeKey HTTP_REQUEST_CONTENT_LENGTH_UNCOMPRESSED = - longKey("http.request_content_length_uncompressed"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use - * {@link SemanticAttributes#HTTP_RESPONSE_CONTENT_LENGTH} instead. - */ - @Deprecated - public static final AttributeKey HTTP_RESPONSE_CONTENT_LENGTH_UNCOMPRESSED = - longKey("http.response_content_length_uncompressed"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use - * {@link SemanticAttributes#NET_HOST_NAME} instead. - */ - @Deprecated - public static final AttributeKey HTTP_SERVER_NAME = stringKey("http.server_name"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use - * {@link SemanticAttributes#NET_HOST_NAME} instead. - */ - @Deprecated public static final AttributeKey HTTP_HOST = stringKey("http.host"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use - * {@link SemanticAttributes#NET_SOCK_PEER_ADDR} instead. - */ - @Deprecated public static final AttributeKey NET_PEER_IP = stringKey("net.peer.ip"); - - /** - * @deprecated This item has been removed as of 1.13.0 of the semantic conventions. Please use - * {@link SemanticAttributes#NET_SOCK_HOST_ADDR} instead. - */ - @Deprecated public static final AttributeKey NET_HOST_IP = stringKey("net.host.ip"); - - /** - * The ordinal number of request re-sending attempt. - * - * @deprecated This item has been removed as of 1.15.0 of the semantic conventions. Use {@link - * SemanticAttributes#HTTP_RESEND_COUNT} instead. - */ - @Deprecated public static final AttributeKey HTTP_RETRY_COUNT = longKey("http.retry_count"); - - /** - * A string identifying the messaging system. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#MESSAGING_DESTINATION_NAME} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_DESTINATION = - stringKey("messaging.destination"); - - /** - * A boolean that is true if the message destination is temporary. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#MESSAGING_DESTINATION_TEMPORARY} instead. 
- */ - @Deprecated - public static final AttributeKey MESSAGING_TEMP_DESTINATION = - booleanKey("messaging.temp_destination"); - - /** - * The name of the transport protocol. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#NET_PROTOCOL_NAME} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_PROTOCOL = stringKey("messaging.protocol"); - - /** - * The version of the transport protocol. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#NET_PROTOCOL_VERSION} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_PROTOCOL_VERSION = - stringKey("messaging.protocol_version"); - - /** - * Connection string. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. There is no - * replacement. - */ - @Deprecated public static final AttributeKey MESSAGING_URL = stringKey("messaging.url"); - - /** - * The conversation ID identifying the conversation to which the - * message belongs, represented as a string. Sometimes called "Correlation ID". - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#MESSAGING_MESSAGE_CONVERSATION_ID} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_CONVERSATION_ID = - stringKey("messaging.conversation_id"); - - /** - * RabbitMQ message routing key. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_RABBITMQ_ROUTING_KEY = - stringKey("messaging.rabbitmq.routing_key"); - - /** - * Partition the message is received from. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#MESSAGING_KAFKA_SOURCE_PARTITION} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_KAFKA_PARTITION = - longKey("messaging.kafka.partition"); - - /** - * A boolean that is true if the message is a tombstone. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#MESSAGING_KAFKA_MESSAGE_TOMBSTONE} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_KAFKA_TOMBSTONE = - booleanKey("messaging.kafka.tombstone"); - - /** - * The timestamp in milliseconds that the delay message is expected to be delivered to consumer. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#MESSAGING_ROCKETMQ_MESSAGE_DELIVERY_TIMESTAMP} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_ROCKETMQ_DELIVERY_TIMESTAMP = - longKey("messaging.rocketmq.delivery_timestamp"); - - /** - * The delay time level for delay message, which determines the message delay time. - * - * @deprecated This item has been removed as of 1.17.0 of the semantic conventions. Use {@link - * SemanticAttributes#MESSAGING_ROCKETMQ_MESSAGE_DELAY_TIME_LEVEL} instead. - */ - @Deprecated - public static final AttributeKey MESSAGING_ROCKETMQ_DELAY_TIME_LEVEL = - longKey("messaging.rocketmq.delay_time_level"); - - /** - * The name of the instrumentation scope - ({@code InstrumentationScope.Name} in OTLP). 
- * - * @deprecated This item has been moved, use {@link - * io.opentelemetry.semconv.resource.attributes.ResourceAttributes#OTEL_SCOPE_NAME} instead. - */ - @Deprecated - public static final AttributeKey OTEL_SCOPE_NAME = stringKey("otel.scope.name"); - - /** - * The version of the instrumentation scope - ({@code InstrumentationScope.Version} in OTLP). - * - * @deprecated This item has been moved, use {@link - * io.opentelemetry.semconv.resource.attributes.ResourceAttributes#OTEL_SCOPE_VERSION} - * instead. - */ - @Deprecated - public static final AttributeKey OTEL_SCOPE_VERSION = stringKey("otel.scope.version"); - - /** - * The execution ID of the current function execution. - * - * @deprecated This item has been renamed in 1.19.0 version of the semantic conventions. Use - * {@link SemanticAttributes#FAAS_INVOCATION_ID} instead. - */ - @Deprecated public static final AttributeKey FAAS_EXECUTION = stringKey("faas.execution"); - - /** - * Value of the HTTP - * User-Agent header sent by the client. - * - * @deprecated This item has been renamed in 1.19.0 version of the semantic conventions. Use - * {@link SemanticAttributes#USER_AGENT_ORIGINAL} instead. - */ - @Deprecated - public static final AttributeKey HTTP_USER_AGENT = stringKey("http.user_agent"); - - /** - * Deprecated. - * - * @deprecated Deprecated, use the {@link - * io.opentelemetry.semconv.resource.attributes.ResourceAttributes#OTEL_SCOPE_NAME} attribute. - */ - @Deprecated - public static final AttributeKey OTEL_LIBRARY_NAME = stringKey("otel.library.name"); - - /** - * Deprecated. - * - * @deprecated Deprecated, use the {@link - * io.opentelemetry.semconv.resource.attributes.ResourceAttributes#OTEL_SCOPE_VERSION} - * attribute. - */ - @Deprecated - public static final AttributeKey OTEL_LIBRARY_VERSION = stringKey("otel.library.version"); - - /** - * Kind of HTTP protocol used. - * - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated public static final AttributeKey HTTP_FLAVOR = stringKey("http.flavor"); - - /** - * Enum definitions for {@link #HTTP_FLAVOR}. - * - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final class HttpFlavorValues { - /** HTTP/1.0. */ - public static final String HTTP_1_0 = "1.0"; - - /** HTTP/1.1. */ - public static final String HTTP_1_1 = "1.1"; - - /** HTTP/2. */ - public static final String HTTP_2_0 = "2.0"; - - /** HTTP/3. */ - public static final String HTTP_3_0 = "3.0"; - - /** SPDY protocol. */ - public static final String SPDY = "SPDY"; - - /** QUIC protocol. */ - public static final String QUIC = "QUIC"; - - private HttpFlavorValues() {} - } - - /** - * Application layer protocol used. The value SHOULD be normalized to lowercase. - * - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. Use {@link - * SemanticAttributes#NET_PROTOCOL_NAME} instead. - */ - @Deprecated - public static final AttributeKey NET_APP_PROTOCOL_NAME = - stringKey("net.app.protocol.name"); - - /** - * Version of the application layer protocol used. See note below. - * - *

    Notes:

    • {@code net.app.protocol.version} refers to the version of the protocol used and might be different from the protocol client's version. If the HTTP client used has a version of {@code 0.27.2}, but sends HTTP version {@code 1.1}, this attribute should be set to {@code 1.1}.
    - * - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. Use {@link - * SemanticAttributes#NET_PROTOCOL_VERSION} instead. - */ - @Deprecated - public static final AttributeKey NET_APP_PROTOCOL_VERSION = - stringKey("net.app.protocol.version"); - - /** - * The kind of message destination. - * - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final AttributeKey MESSAGING_DESTINATION_KIND = - stringKey("messaging.destination.kind"); - - /** - * Enum values for {@link #MESSAGING_DESTINATION_KIND}. - * - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final class MessagingDestinationKindValues { - /** A message sent to a queue. */ - public static final String QUEUE = "queue"; - - /** A message sent to a topic. */ - public static final String TOPIC = "topic"; - - private MessagingDestinationKindValues() {} - } - - /** - * The kind of message source. - * - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final AttributeKey MESSAGING_SOURCE_KIND = - stringKey("messaging.source.kind"); - - /** - * Enum values for {@link #MESSAGING_SOURCE_KIND}. - * - * @deprecated This item has been removed as of 1.20.0 of the semantic conventions. - */ - @Deprecated - public static final class MessagingSourceKindValues { - /** A message received from a queue. */ - public static final String QUEUE = "queue"; - - /** A message received from a topic. */ - public static final String TOPIC = "topic"; - - private MessagingSourceKindValues() {} - } - - private SemanticAttributes() {} -} diff --git a/semconv/src/main/java/io/opentelemetry/semconv/trace/attributes/package-info.java b/semconv/src/main/java/io/opentelemetry/semconv/trace/attributes/package-info.java deleted file mode 100644 index 15f7b0cb460..00000000000 --- a/semconv/src/main/java/io/opentelemetry/semconv/trace/attributes/package-info.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -/** - * OpenTelemetry semantic attributes for traces. 
- * - * @see io.opentelemetry.semconv.trace.attributes.SemanticAttributes - */ -@ParametersAreNonnullByDefault -package io.opentelemetry.semconv.trace.attributes; - -import javax.annotation.ParametersAreNonnullByDefault; diff --git a/settings.gradle.kts b/settings.gradle.kts index cc4903adc9c..59803cb6497 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -1,21 +1,22 @@ pluginManagement { plugins { - id("com.github.johnrengelman.shadow") version "8.1.1" - id("com.gradle.enterprise") version "3.14.1" - id("de.undercouch.download") version "5.5.0" - id("org.jsonschema2pojo") version "1.2.1" - id("io.github.gradle-nexus.publish-plugin") version "1.3.0" - id("org.graalvm.buildtools.native") version "0.9.25" + id("com.gradleup.shadow") version "8.3.6" + id("com.gradle.develocity") version "3.19.2" + id("de.undercouch.download") version "5.6.0" + id("org.jsonschema2pojo") version "1.2.2" + id("io.github.gradle-nexus.publish-plugin") version "2.0.0" + id("org.graalvm.buildtools.native") version "0.10.6" } } plugins { - id("com.gradle.enterprise") + id("com.gradle.develocity") } dependencyResolutionManagement { repositories { mavenCentral() + google() mavenLocal() } } @@ -23,26 +24,25 @@ dependencyResolutionManagement { rootProject.name = "opentelemetry-java" include(":all") include(":api:all") -include(":api:events") -include(":semconv") +include(":api:incubator") +include(":api:testing-internal") include(":bom") include(":bom-alpha") include(":context") +include(":custom-checks") include(":dependencyManagement") -include(":extensions:incubator") include(":extensions:kotlin") include(":extensions:trace-propagators") include(":exporters:common") +include(":exporters:common:compile-stub") include(":exporters:sender:grpc-managed-channel") include(":exporters:sender:jdk") include(":exporters:sender:okhttp") -include(":exporters:jaeger") -include(":exporters:jaeger-proto") -include(":exporters:jaeger-thrift") include(":exporters:logging") include(":exporters:logging-otlp") include(":exporters:otlp:all") include(":exporters:otlp:common") +include(":exporters:otlp:profiles") include(":exporters:otlp:testing-internal") include(":exporters:prometheus") include(":exporters:zipkin") @@ -50,6 +50,7 @@ include(":integration-tests") include(":integration-tests:otlp") include(":integration-tests:tracecontext") include(":integration-tests:graal") +include(":integration-tests:graal-incubating") include(":opencensus-shim") include(":opentracing-shim") include(":perf-harness") @@ -65,43 +66,12 @@ include(":sdk-extensions:autoconfigure-spi") include(":sdk-extensions:incubator") include(":sdk-extensions:jaeger-remote-sampler") include(":testing-internal") +include(":animal-sniffer-signature") -val gradleEnterpriseServer = "https://ge.opentelemetry.io" -val isCI = System.getenv("CI") != null -val geAccessKey = System.getenv("GRADLE_ENTERPRISE_ACCESS_KEY") ?: "" - -// if GE access key is not given and we are in CI, then we publish to scans.gradle.com -val useScansGradleCom = isCI && geAccessKey.isEmpty() - -if (useScansGradleCom) { - gradleEnterprise { - buildScan { - termsOfServiceUrl = "https://gradle.com/terms-of-service" - termsOfServiceAgree = "yes" - isUploadInBackground = !isCI - publishAlways() - - capture { - isTaskInputFiles = true - } - } - } -} else { - gradleEnterprise { - server = gradleEnterpriseServer - buildScan { - isUploadInBackground = !isCI - - this as com.gradle.enterprise.gradleplugin.internal.extension.BuildScanExtensionWithHiddenFeatures - publishIfAuthenticated() - publishAlways() - 
-      capture {
-        isTaskInputFiles = true
-      }
-
-      gradle.startParameter.projectProperties["testJavaVersion"]?.let { tag(it) }
-      gradle.startParameter.projectProperties["testJavaVM"]?.let { tag(it) }
-    }
+develocity {
+  buildScan {
+    publishing.onlyIf { System.getenv("CI") != null }
+    termsOfUseUrl.set("https://gradle.com/help/legal-terms-of-use")
+    termsOfUseAgree.set("yes")
+  }
 }
diff --git a/version.gradle.kts b/version.gradle.kts
index 40a536f0418..7ddd8b3d82f 100644
--- a/version.gradle.kts
+++ b/version.gradle.kts
@@ -1,7 +1,7 @@
-val snapshot = true
+val snapshot = false

 allprojects {
-  var ver = "1.30.0"
+  var ver = "1.49.0"
   val release = findProperty("otel.release")
   if (release != null) {
     ver += "-" + release
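For reference, the version.gradle.kts hunk above assembles the published version from the base version plus an optional otel.release Gradle property (with the snapshot flag flipped to false for this release). Below is a minimal standalone sketch of that assembly; the base version "1.49.0" and the property name come from the diff, while the "alpha" value and the helper function are illustrative assumptions, not part of the build script.

// Sketch (Kotlin): how the project version string is composed from the base version
// and an optional release qualifier passed as a Gradle property (e.g. -Potel.release=alpha).
fun composeVersion(baseVersion: String, release: String?): String {
    var ver = baseVersion
    if (release != null) {
        // mirrors `ver += "-" + release` in version.gradle.kts
        ver += "-" + release
    }
    return ver
}

fun main() {
    println(composeVersion("1.49.0", null))    // no property set -> 1.49.0
    println(composeVersion("1.49.0", "alpha")) // hypothetical -Potel.release=alpha -> 1.49.0-alpha
}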