diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/.github/ci-prerequisites-atlas.sh b/.github/ci-prerequisites-atlas.sh
new file mode 100755
index 000000000000..b0a16f22b1e8
--- /dev/null
+++ b/.github/ci-prerequisites-atlas.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Reclaims disk space and sanitizes user home on Atlas infrastructure
+
+# We use the GitHub cache for the relevant parts of these directories.
+# Also, we do not want to keep things like ~/.gradle/build-scan-data.
+rm -rf ~/.gradle/
+rm -rf ~/.m2/
diff --git a/.github/workflows/atlas.yml b/.github/workflows/atlas.yml
deleted file mode 100644
index afd8892e7cf8..000000000000
--- a/.github/workflows/atlas.yml
+++ /dev/null
@@ -1,118 +0,0 @@
-# The main CI of Hibernate ORM is https://ci.hibernate.org/job/hibernate-orm-pipeline/.
-# However, Hibernate ORM builds run on GitHub actions regularly
-# to check that it still works and can be used in GitHub forks.
-# See https://docs.github.com/en/free-pro-team@latest/actions
-# for more information about GitHub actions.
-
-name: Hibernate ORM build-Atlas
-
-on:
- push:
- branches:
- - 'main'
- # WARNING: Using pull_request_target to access secrets, but we check out the PR head commit.
- # See checkout action for details.
- pull_request_target:
- branches:
- - 'main'
-
-permissions: {} # none
-
-# See https://github.com/hibernate/hibernate-orm/pull/4615 for a description of the behavior we're getting.
-concurrency:
- # Consider that two builds are in the same concurrency group (cannot run concurrently)
- # if they use the same workflow and are about the same branch ("ref") or pull request.
- group: "workflow = ${{ github.workflow }}, ref = ${{ github.event.ref }}, pr = ${{ github.event.pull_request.id }}"
- # Cancel previous builds in the same concurrency group even if they are in process
- # for pull requests or pushes to forks (not the upstream repository).
- cancel-in-progress: ${{ github.event_name == 'pull_request_target' || github.repository != 'hibernate/hibernate-orm' }}
-
-jobs:
- build:
- permissions:
- contents: read
- name: Java 11
- # runs-on: ubuntu-latest
- runs-on: [self-hosted, Linux, X64, OCI]
- strategy:
- fail-fast: false
- matrix:
- include:
- - rdbms: oracle_atps
- - rdbms: oracle_db19c
- - rdbms: oracle_db21c
- - rdbms: oracle_db23c
- steps:
- - name: Check out commit already pushed to branch
- if: "! github.event.pull_request.number"
- uses: actions/checkout@v4
- with:
- persist-credentials: false
- - name: Check out PR head
- uses: actions/checkout@v4
- if: github.event.pull_request.number
- with:
- # WARNING: This is potentially dangerous since we're checking out unreviewed code,
- # and since we're using the pull_request_target event we can use secrets.
- # Thus, we must be extra careful to never expose secrets to steps that execute this code,
- # and to strictly limit our of secrets to those that only pose minor security threats.
- # This means in particular we won't expose Develocity credentials to the main gradle executions,
- # but instead will execute gradle a second time just to push build scans to Develocity;
- # see below.
- ref: "refs/pull/${{ github.event.pull_request.number }}/head"
- persist-credentials: false
- - name: Reclaim Disk Space
- run: .github/ci-prerequisites.sh
- - name: Start database
- env:
- RDBMS: ${{ matrix.rdbms }}
- RUNID: ${{ github.run_number }}
- run: ci/database-start.sh
- - name: Set up Java 11
- uses: graalvm/setup-graalvm@v1
- with:
- distribution: 'graalvm'
- java-version: '21'
- - name: Get year/month for cache key
- id: get-date
- run: echo "yearmonth=$(/bin/date -u "+%Y-%m")" >> $GITHUB_OUTPUT
- shell: bash
- - name: Cache Maven local repository
- uses: actions/cache@v4
- id: cache-maven
- with:
- path: |
- ~/.m2/repository
- ~/.gradle/caches/
- ~/.gradle/wrapper/
- # refresh cache every month to avoid unlimited growth
- key: maven-localrepo-${{ steps.get-date.outputs.yearmonth }}
- - name: Run build script
- env:
- RDBMS: ${{ matrix.rdbms }}
- RUNID: ${{ github.run_number }}
- # WARNING: exposes secrets, so must only be passed to a step that doesn't run unapproved code.
- # WARNING: As this runs on untrusted nodes, we use the same access key as for PRs:
- # it has limited access, essentially it can only push build scans.
- DEVELOCITY_ACCESS_KEY: "${{ github.event_name == 'push' && secrets.GRADLE_ENTERPRISE_ACCESS_KEY_PR || '' }}"
- run: ./ci/build-github.sh
- shell: bash
- - name: Publish Develocity build scan for previous build
- # Don't fail a build if publishing fails
- continue-on-error: true
- if: "${{ !cancelled() && github.event_name == 'pull_request_target' && github.repository == 'hibernate/hibernate-orm' }}"
- run: |
- ./gradlew buildScanPublishPrevious
- env:
- # WARNING: exposes secrets, so must only be passed to a step that doesn't run unapproved code.
- DEVELOCITY_ACCESS_KEY: ${{ secrets.GRADLE_ENTERPRISE_ACCESS_KEY_PR }}
- - name: Upload test reports (if Gradle failed)
- uses: actions/upload-artifact@v4
- if: failure()
- with:
- name: test-reports-java11-${{ matrix.rdbms }}
- path: |
- ./**/target/reports/tests/
- ./**/target/reports/checkstyle/
- - name: Omit produced artifacts from build cache
- run: ./ci/before-cache.sh
\ No newline at end of file
diff --git a/.github/workflows/ci-report.yml b/.github/workflows/ci-report.yml
new file mode 100644
index 000000000000..daf9bdd17275
--- /dev/null
+++ b/.github/workflows/ci-report.yml
@@ -0,0 +1,197 @@
+name: GH Actions CI reporting
+
+on:
+ workflow_run:
+ workflows: [ "GH Actions CI" ]
+ types: [ completed ]
+
+defaults:
+ run:
+ shell: bash
+
+jobs:
+ publish-build-scans:
+ name: Publish Develocity build scans
+ if: github.repository == 'hibernate/hibernate-orm' && github.event.workflow_run.conclusion != 'cancelled'
+ runs-on: ubuntu-latest
+ steps:
+ # Checkout target branch which has trusted code
+ - name: Check out target branch
+ uses: actions/checkout@v4
+ with:
+ persist-credentials: false
+ ref: ${{ github.ref }}
+ - name: Set up Java 11
+ uses: actions/setup-java@v4
+ with:
+ distribution: 'temurin'
+ java-version: '11'
+
+ - name: Generate cache key
+ id: cache-key
+ run: |
+ CURRENT_BRANCH="${{ github.repository != 'hibernate/hibernate-orm' && 'fork' || github.base_ref || github.ref_name }}"
+ CURRENT_MONTH=$(/bin/date -u "+%Y-%m")
+ CURRENT_DAY=$(/bin/date -u "+%d")
+ ROOT_CACHE_KEY="buildtool-cache"
+ echo "buildtool-monthly-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}" >> $GITHUB_OUTPUT
+ echo "buildtool-monthly-branch-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}-${CURRENT_BRANCH}" >> $GITHUB_OUTPUT
+ echo "buildtool-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}-${CURRENT_BRANCH}-${CURRENT_DAY}" >> $GITHUB_OUTPUT
+ - name: Restore Maven/Gradle Dependency/Dist Caches
+ uses: actions/cache/restore@v4
+ with:
+ path: |
+ ~/.m2/repository/
+ ~/.m2/wrapper/
+ ~/.gradle/caches/modules-2
+ ~/.gradle/wrapper/
+ key: ${{ steps.cache-key.outputs.buildtool-cache-key }}
+ restore-keys: |
+ ${{ steps.cache-key.outputs.buildtool-monthly-branch-cache-key }}-
+ ${{ steps.cache-key.outputs.buildtool-monthly-cache-key }}-
+
+ - name: Download GitHub Actions artifacts for the Develocity build scans
+ id: downloadBuildScan
+ uses: actions/download-artifact@v4
+ with:
+ pattern: build-scan-data-*
+ github-token: ${{ github.token }}
+ repository: ${{ github.repository }}
+ run-id: ${{ github.event.workflow_run.id }}
+ path: /tmp/downloaded-build-scan-data/
+ # Don't fail the build if there are no matching artifacts
+ continue-on-error: true
+ - name: Publish Develocity build scans for previous builds
+        if: ${{ steps.downloadBuildScan.outcome != 'failure' }}
+ run: |
+ shopt -s nullglob # Don't run the loop below if there are no artifacts
+ status=0
+ mkdir -p ~/.gradle/
+ for build_scan_data_directory in /tmp/downloaded-build-scan-data/*
+ do
+ rm -rf ~/.gradle/build-scan-data
+ mv "$build_scan_data_directory" ~/.gradle/build-scan-data \
+ && ./gradlew --no-build-cache buildScanPublishPrevious || status=1
+ done
+ exit $status
+ env:
+ DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY_PR }}
+
+ publish-sonar-scans:
+ name: Publish Sonar scan
+ if: github.repository == 'hibernate/hibernate-orm' && github.event.workflow_run.conclusion != 'cancelled'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Determine the Branch Reference for which the original action was triggered
+ id: determine_branch_ref
+ env:
+ GH_TOKEN: ${{ github.token }}
+ run: |
+ if [ "${{ github.event.workflow_run.event }}" == "pull_request" ]; then
+ echo "::notice::Triggering workflow was executed for a pull request"
+
+ FORK_OWNER="${{ github.event.workflow_run.head_repository.owner.login }}"
+ BRANCH_NAME="${{ github.event.workflow_run.head_branch }}"
+ if [ "${{ github.event.workflow_run.head_repository.owner.login }}" != "${{ github.event.workflow_run.repository.owner.login }}" ]; then
+ BRANCH_NAME="$FORK_OWNER:$BRANCH_NAME"
+ fi
+ GH_RESPONSE=$(gh pr view "$BRANCH_NAME" --repo ${{ github.event.workflow_run.repository.full_name }} --json number,baseRefName)
+ TARGET_BRANCH=$(echo $GH_RESPONSE | jq -r '.baseRefName')
+ PR_ID=$(echo $GH_RESPONSE | jq -r '.number')
+
+ echo "::notice::PR found. Target branch is: $TARGET_BRANCH"
+ echo "::notice:: Pull Request number is: $PR_ID"
+ echo "original_branch_ref=$TARGET_BRANCH" >> "$GITHUB_OUTPUT"
+ echo "pr_id=$PR_ID" >> "$GITHUB_OUTPUT"
+ else
+ echo "::notice::Triggering workflow was executed for a push event? Using the head_branch value."
+ echo "original_branch_ref=${{ github.event.workflow_run.head_branch }}" >> "$GITHUB_OUTPUT"
+ fi
+ # Checkout target branch (from the main repository)
+ - name: Check out target branch
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ persist-credentials: false
+ # By default, a workflow that is triggered with on workflow_run would run on the main (default) branch.
+ # Different branches might have different versions of Develocity, and we want to make sure
+ # that we publish with the one that we built the scan with in the first place.
+ ref: ${{ steps.determine_branch_ref.outputs.original_branch_ref }}
+ fetch-depth: 0
+
+ # Note: we need to check out the code with all the changes so that we have the sources,
+ # matching our compiled classes we'll pull from the build artifacts.
+ # We won't be running any builds from the checked out code,
+ # but we'll use the code to run the sonar scanner tool.
+ #
+ # Only needed if we are analysing the PR,
+ # as otherwise the previous checkout already did the work.
+ - name: Check out merged code (if PR)
+ env:
+ GH_TOKEN: ${{ github.token }}
+ run: |
+ if [ "${{ github.event.workflow_run.event }}" == "pull_request" ]; then
+ gh pr checkout ${{steps.determine_branch_ref.outputs.pr_id}}
+ fi
+
+ # so we aren't tempted to run a Gradle command!
+ rm -rf gradlew*
+
+ - name: Set up Java 25
+ uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5.2.0
+ with:
+ java-version: 25
+ distribution: temurin
+
+ - name: Download coverage reports
+        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
+ with:
+ pattern: build-results-data
+ github-token: ${{ github.token }}
+ repository: ${{ github.repository }}
+ run-id: ${{ github.event.workflow_run.id }}
+ path: .
+ merge-multiple: 'true'
+ # Don't fail the build if there are no matching artifacts
+ continue-on-error: true
+
+ - name: Install Sonar CLI
+ run: |
+ SONAR_HASH=8fbfb1eb546b734a60fc3e537108f06e389a8ca124fbab3a16236a8a51edcc15
+ SONAR_SCANNER_VERSION=8.0.1.6346
+ export SONAR_SCANNER_HOME=$HOME/.sonar/sonar-scanner-$SONAR_SCANNER_VERSION
+ curl --create-dirs -sSLo $HOME/.sonar/sonar-scanner.zip https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-$SONAR_SCANNER_VERSION.zip
+ DOWNLOADED_HASH=$(sha256sum $HOME/.sonar/sonar-scanner.zip | awk '{print $1}')
+ if [ "$DOWNLOADED_HASH" == "$SONAR_HASH" ]; then
+ echo "Successfully verified the file checksum"
+ else
+ echo "Error: Failed the file checksum verification. Expected: $SONAR_HASH but got $DOWNLOADED_HASH instead"
+ exit 1
+ fi
+ unzip -o $HOME/.sonar/sonar-scanner.zip -d $HOME/.sonar/
+ mv "$HOME/.sonar/sonar-scanner-$SONAR_SCANNER_VERSION"/* "$HOME/.sonar/"
+
+ - name: Sonar Analysis
+ env:
+ SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+ run: |
+ find . -name "*.exec" -type f
+ EXTRA_ARGS=""
+ if [ "${{ github.event.workflow_run.event }}" == "pull_request" ]; then
+ echo "::notice::Triggering workflow was executed for a pull request"
+
+ FORK_OWNER="${{ github.event.workflow_run.head_repository.owner.login }}"
+ BRANCH_NAME="${{ github.event.workflow_run.head_branch }}"
+ if [ "${{ github.event.workflow_run.head_repository.owner.login }}" != "${{ github.event.workflow_run.repository.owner.login }}" ]; then
+ BRANCH_NAME="$FORK_OWNER:$BRANCH_NAME"
+ fi
+ TARGET_BRANCH=$(gh pr view "$BRANCH_NAME" --repo ${{ github.event.workflow_run.repository.full_name }} --json baseRefName -q .baseRefName)
+ PR_ID=$(gh pr view "$BRANCH_NAME" --repo ${{ github.event.workflow_run.repository.full_name }} --json number -q .number)
+
+ EXTRA_ARGS="-Dsonar.pullrequest.branch=$BRANCH_NAME -Dsonar.pullrequest.key=$PR_ID -Dsonar.pullrequest.base=${{steps.determine_branch_ref.outputs.original_branch_ref}} -Dsonar.pullrequest.provider=GitHub -Dsonar.pullrequest.github.repository=hibernate/hibernate-orm"
+ else
+ EXTRA_ARGS="-Dsonar.branch.name=${{github.event.workflow_run.head_branch}}"
+ fi
+
+ $HOME/.sonar/bin/sonar-scanner $EXTRA_ARGS \
+ -Dsonar.java.libraries="$(pwd)/target/sonar-dependencies/*.jar" \
+ -Dsonar.coverage.jacoco.xmlReportPaths="$(pwd)/reporting/target/reports/jacoco/mergeCodeCoverageReport/mergeCodeCoverageReport.xml"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 000000000000..35909c1feb38
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,356 @@
+name: GH Actions CI
+
+on:
+ pull_request:
+ branches:
+ - '6.6'
+
+permissions: {} # none
+
+# See https://github.com/hibernate/hibernate-orm/pull/4615 for a description of the behavior we're getting.
+concurrency:
+ # Consider that two builds are in the same concurrency group (cannot run concurrently)
+ # if they use the same workflow and are about the same branch ("ref") or pull request.
+ group: "workflow = ${{ github.workflow }}, ref = ${{ github.event.ref }}, pr = ${{ github.event.pull_request.id }}"
+ # Cancel previous builds in the same concurrency group even if they are in progress
+ # for pull requests or pushes to forks (not the upstream repository).
+ cancel-in-progress: ${{ github.event_name == 'pull_request' || github.repository != 'hibernate/hibernate-orm' }}
+
+jobs:
+
+ # Main job for h2/docker DBs.
+ build:
+ permissions:
+ contents: read
+ name: OpenJDK 11 - ${{matrix.rdbms}}
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - rdbms: h2
+ - rdbms: hsqldb
+ - rdbms: derby
+ - rdbms: mysql
+ - rdbms: mariadb
+ - rdbms: postgresql
+ - rdbms: edb
+ - rdbms: oracle
+ - rdbms: db2
+ - rdbms: mssql
+ - rdbms: sybase
+# Running with CockroachDB requires at least 2-4 vCPUs, which we don't have on GH Actions runners
+# - rdbms: cockroachdb
+# Running with HANA requires at least 8GB memory just for the database, which we don't have on GH Actions runners
+# - rdbms: hana
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ persist-credentials: false
+ - name: Reclaim Disk Space
+ run: .github/ci-prerequisites.sh
+ - name: Start database
+ env:
+ RDBMS: ${{ matrix.rdbms }}
+ run: ci/database-start.sh
+ - name: Set up Java 11
+ uses: actions/setup-java@v4
+ with:
+ distribution: 'temurin'
+ java-version: '11'
+
+ - name: Generate cache key
+ id: cache-key
+ run: |
+ CURRENT_BRANCH="${{ github.repository != 'hibernate/hibernate-orm' && 'fork' || github.base_ref || github.ref_name }}"
+ CURRENT_MONTH=$(/bin/date -u "+%Y-%m")
+ CURRENT_DAY=$(/bin/date -u "+%d")
+ ROOT_CACHE_KEY="buildtool-cache"
+ echo "buildtool-monthly-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}" >> $GITHUB_OUTPUT
+ echo "buildtool-monthly-branch-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}-${CURRENT_BRANCH}" >> $GITHUB_OUTPUT
+ echo "buildtool-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}-${CURRENT_BRANCH}-${CURRENT_DAY}" >> $GITHUB_OUTPUT
+ - name: Cache Maven/Gradle Dependency/Dist Caches
+ id: cache-maven
+ uses: actions/cache@v4
+ # if it's not a pull request, we restore and save the cache
+ if: github.event_name != 'pull_request'
+ with:
+ path: |
+ ~/.m2/repository/
+ ~/.m2/wrapper/
+ ~/.gradle/caches/modules-2
+ ~/.gradle/wrapper/
+ # A new cache will be stored daily. After that first store of the day, cache save actions will fail because the cache is immutable but it's not a problem.
+ # The whole cache is dropped monthly to prevent unlimited growth.
+          # The cache is per branch but in case we don't find a cache for a given branch, we will get a cache from another branch.
+ key: ${{ steps.cache-key.outputs.buildtool-cache-key }}
+ restore-keys: |
+ ${{ steps.cache-key.outputs.buildtool-monthly-branch-cache-key }}-
+ ${{ steps.cache-key.outputs.buildtool-monthly-cache-key }}-
+ - name: Restore Maven/Gradle Dependency/Dist Caches
+ uses: actions/cache/restore@v4
+        # if it is a pull request, we restore the cache but we don't save it
+ if: github.event_name == 'pull_request'
+ with:
+ path: |
+ ~/.m2/repository/
+ ~/.m2/wrapper/
+ ~/.gradle/caches/modules-2
+ ~/.gradle/wrapper/
+ key: ${{ steps.cache-key.outputs.buildtool-cache-key }}
+ restore-keys: |
+ ${{ steps.cache-key.outputs.buildtool-monthly-branch-cache-key }}-
+ ${{ steps.cache-key.outputs.buildtool-monthly-cache-key }}-
+
+ - name: Run build script
+ run: ./ci/build-github.sh
+ shell: bash
+ env:
+ RDBMS: ${{ matrix.rdbms }}
+ # For jobs running on 'push', publish build scan and cache immediately.
+ # This won't work for pull requests, since they don't have access to secrets.
+ POPULATE_REMOTE_GRADLE_CACHE: ${{ github.event_name == 'push' && github.repository == 'hibernate/hibernate-orm' && 'true' || 'false' }}
+ DEVELOCITY_ACCESS_KEY: "${{ secrets.DEVELOCITY_ACCESS_KEY }}"
+
+ # For jobs running on 'pull_request', upload build scan data.
+ # The actual publishing must be done in a separate job (see ci-report.yml).
+ # We don't write to the remote cache as that would be unsafe.
+ - name: Upload GitHub Actions artifact for the Develocity build scan
+ uses: actions/upload-artifact@v4
+ if: "${{ github.event_name == 'pull_request' && !cancelled() }}"
+ with:
+ name: build-scan-data-${{ matrix.rdbms }}
+ path: ~/.gradle/build-scan-data
+ - name: Store coverage report
+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ with:
+ name: build-coverage-data-${{ matrix.rdbms }}
+ retention-days: 1
+ path: |
+ ./**/target/jacoco/*.exec
+ - name: Store build results
+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ if: "${{ matrix.rdbms == 'h2' }}"
+ with:
+ name: build-compilation-data
+ retention-days: 1
+ path: |
+ ./**/target/resources/
+ ./**/target/classes/
+ ./**/target/generated/
+ .gradle/caches/build-cache-*
+ - name: Upload test reports (if Gradle failed)
+ uses: actions/upload-artifact@v4
+ if: failure()
+ with:
+ name: test-reports-java11-${{ matrix.rdbms }}
+ path: |
+ ./**/target/reports/tests/
+ - name: Omit produced artifacts from build cache
+ run: ./ci/before-cache.sh
+
+ # Job for builds on Atlas (Oracle) infrastructure.
+ # This is untrusted, even for pushes, see below.
+ otp:
+ permissions:
+ contents: read
+ name: GraalVM 21 - ${{matrix.rdbms}}
+ runs-on: [ self-hosted, Linux, X64, OracleTestPilot ]
+ if: github.repository == 'hibernate/hibernate-orm'
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ #- rdbms: autonomous-transaction-processing-serverless-26ai
+ #- rdbms: autonomous-transaction-processing-serverless-19c
+ - rdbms: base-database-service-19c
+ - rdbms: base-database-service-21c
+ - rdbms: base-database-service-26ai
+ steps:
+ - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ persist-credentials: false
+ - name: Set up Java 21
+ uses: graalvm/setup-graalvm@aafbedb8d382ed0ca6167d3a051415f20c859274 # v1.2.8
+ with:
+ distribution: 'graalvm'
+ java-version: '21'
+
+ - name: Generate cache key
+ id: cache-key
+ run: |
+ CURRENT_BRANCH="${{ github.repository != 'hibernate/hibernate-orm' && 'fork' || github.base_ref || github.ref_name }}"
+ CURRENT_MONTH=$(/bin/date -u "+%Y-%m")
+ CURRENT_DAY=$(/bin/date -u "+%d")
+ ROOT_CACHE_KEY="buildtool-cache-atlas"
+ echo "buildtool-monthly-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}" >> $GITHUB_OUTPUT
+ echo "buildtool-monthly-branch-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}-${CURRENT_BRANCH}" >> $GITHUB_OUTPUT
+ echo "buildtool-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}-${CURRENT_BRANCH}-${CURRENT_DAY}" >> $GITHUB_OUTPUT
+ - name: Cache Maven/Gradle Dependency/Dist Caches
+ id: cache-maven
+ uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+ # if it's not a pull request, we restore and save the cache
+ if: github.event_name != 'pull_request'
+ with:
+ path: |
+ ~/.m2/repository/
+ ~/.m2/wrapper/
+ ~/.gradle/caches/
+ ~/.gradle/caches/modules-2
+ !~/.gradle/caches/build-cache-*
+ ~/.gradle/wrapper/
+ # A new cache will be stored daily. After that first store of the day, cache save actions will fail because the cache is immutable, but it's not a problem.
+ # The whole cache is dropped monthly to prevent unlimited growth.
+          # The cache is per branch but in case we don't find a cache for a given branch, we will get a cache from another branch.
+ key: ${{ steps.cache-key.outputs.buildtool-cache-key }}
+ restore-keys: |
+ ${{ steps.cache-key.outputs.buildtool-monthly-branch-cache-key }}-
+ ${{ steps.cache-key.outputs.buildtool-monthly-cache-key }}-
+ - name: Restore Maven/Gradle Dependency/Dist Caches
+ uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+ # if it is a pull request, we restore the cache, but we don't save it
+ if: github.event_name == 'pull_request'
+ with:
+ path: |
+ ~/.m2/repository/
+ ~/.m2/wrapper/
+ ~/.gradle/caches/modules-2
+ ~/.gradle/wrapper/
+ key: ${{ steps.cache-key.outputs.buildtool-cache-key }}
+ restore-keys: |
+ ${{ steps.cache-key.outputs.buildtool-monthly-branch-cache-key }}-
+ ${{ steps.cache-key.outputs.buildtool-monthly-cache-key }}-
+
+ - id: create_database
+ uses: loiclefevre/test@a802f8bb53b42b16c253d75f86b06360d150c6e4 # v1.0.22
+ with:
+ oci-service: ${{ matrix.rdbms }}
+ action: create
+ user: hibernate_orm_test
+
+ - name: Run build script
+ env:
+ RDBMS: ${{ matrix.rdbms }}
+ RUNID: ${{ github.run_number }}
+ TESTPILOT_CONNECTION_STRING_SUFFIX: ${{ steps.create_database.outputs.connection_string_suffix }}
+ TESTPILOT_PASSWORD: ${{ steps.create_database.outputs.database_password }}
+ API_HOST: ""
+ TESTPILOT_CLIENT_ID: ""
+ TESTPILOT_TOKEN: ""
+ # Needed for TFO (TCP fast open)
+ LD_PRELOAD: /home/ubuntu/libtfojdbc1.so
+ LD_LIBRARY_PATH: /home/ubuntu
+ run: ./ci/build-github.sh
+ shell: bash
+
+ - uses: loiclefevre/test@a802f8bb53b42b16c253d75f86b06360d150c6e4 # v1.0.22
+ if: always()
+ with:
+ oci-service: ${{ matrix.rdbms }}
+ action: delete
+ user: hibernate_orm_test
+
+ # Upload build scan data.
+ # The actual publishing must be done in a separate job (see ci-report.yml).
+ # We don't write to the remote cache as that would be unsafe.
+ # That's even on push, because we do not trust Atlas runners to hold secrets: they are shared infrastructure.
+ - name: Upload GitHub Actions artifact for the Develocity build scan
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
+ if: "${{ !cancelled() }}"
+ with:
+ name: build-scan-data-${{ matrix.rdbms }}
+ path: ~/.gradle/build-scan-data
+ - name: Store coverage report
+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ with:
+ name: build-coverage-data-${{ matrix.rdbms }}
+ retention-days: 1
+ path: |
+ ./**/target/jacoco/*.exec
+ - name: Upload test reports (if Gradle failed)
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
+ if: failure()
+ with:
+ name: test-reports-java11-${{ matrix.rdbms }}
+ path: |
+ ./**/target/reports/tests/
+ ./**/target/reports/checkstyle/
+ - name: Omit produced artifacts from build cache
+ run: ./ci/before-cache.sh
+
+ prepare-sonar-bundle:
+ name: Prepare build bundle for Sonar scanner
+ needs:
+ - build
+ - otp
+ if: |
+ always() && !cancelled()
+ && needs.build.result != 'cancelled' && needs.otp.result != 'cancelled'
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+
+ - name: Set up JDK 11
+ uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5.2.0
+ with:
+ java-version: '11'
+ distribution: 'temurin'
+
+ - name: Generate cache key
+ id: cache-key
+ run: |
+ CURRENT_BRANCH="${{ github.repository != 'hibernate/hibernate-orm' && 'fork' || github.base_ref || github.ref_name }}"
+ CURRENT_MONTH=$(/bin/date -u "+%Y-%m")
+ CURRENT_DAY=$(/bin/date -u "+%d")
+ ROOT_CACHE_KEY="buildtool-cache"
+ echo "buildtool-monthly-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}" >> $GITHUB_OUTPUT
+ echo "buildtool-monthly-branch-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}-${CURRENT_BRANCH}" >> $GITHUB_OUTPUT
+ echo "buildtool-cache-key=${ROOT_CACHE_KEY}-${CURRENT_MONTH}-${CURRENT_BRANCH}-${CURRENT_DAY}" >> $GITHUB_OUTPUT
+ - name: Restore Maven/Gradle Dependency/Dist Caches
+ uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+ with:
+ path: |
+ ~/.m2/repository/
+ ~/.m2/wrapper/
+ ~/.gradle/caches/
+ !~/.gradle/caches/build-cache-*
+ ~/.gradle/wrapper/
+ key: ${{ steps.cache-key.outputs.buildtool-cache-key }}
+ restore-keys: |
+ ${{ steps.cache-key.outputs.buildtool-monthly-branch-cache-key }}-
+ ${{ steps.cache-key.outputs.buildtool-monthly-cache-key }}-
+
+ - name: Download compilation results
+        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
+ with:
+ name: build-compilation-data
+ path: .
+ # Don't fail the build if there are no matching artifacts (the build will re-compile things then)
+ continue-on-error: true
+
+ - name: Download coverage reports
+        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
+ with:
+ pattern: build-coverage-data*
+ path: .
+ merge-multiple: 'true'
+ # Don't fail the build if there are no matching artifacts
+ continue-on-error: true
+
+ - name: Merge Jacoco Reports
+ run: ./gradlew mergeCodeCoverageReport copyDependenciesSonar --no-parallel
+
+ - name: Store build info for Sonar scanning
+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ with:
+ name: build-results-data
+ retention-days: 1
+ path: |
+ ./**/target/jacoco/*.exec
+ ./**/target/classes/
+ ./**/target/generated/
+ ./**/target/resources/
+ ./**/target/reports/
+ ./**/target/sonar-dependencies/
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
deleted file mode 100644
index a05c7b585b5d..000000000000
--- a/.github/workflows/codeql.yml
+++ /dev/null
@@ -1,72 +0,0 @@
-name: "CodeQL"
-
-on:
- push:
- branches: [ 'main' ]
- pull_request:
- # The branches below must be a subset of the branches above
- branches: [ 'main' ]
- schedule:
- - cron: '34 11 * * 4'
-
-# See https://github.com/hibernate/hibernate-orm/pull/4615 for a description of the behavior we're getting.
-concurrency:
- # Consider that two builds are in the same concurrency group (cannot run concurrently)
- # if they use the same workflow and are about the same branch ("ref") or pull request.
- group: "workflow = ${{ github.workflow }}, ref = ${{ github.event.ref }}, pr = ${{ github.event.pull_request.id }}"
- # Cancel previous builds in the same concurrency group even if they are in process
- # for pull requests or pushes to forks (not the upstream repository).
- cancel-in-progress: ${{ github.event_name == 'pull_request' || github.repository != 'hibernate/hibernate-orm' }}
-
-jobs:
- analyze:
- name: Analyze
- runs-on: ubuntu-latest
- permissions:
- actions: read
- contents: read
- security-events: write
-
- strategy:
- fail-fast: false
- matrix:
- language: [ 'java' ]
- # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
- # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v4
-
- # Initializes the CodeQL tools for scanning.
- - name: Initialize CodeQL
- uses: github/codeql-action/init@v3
- with:
- languages: ${{ matrix.language }}
- # If you wish to specify custom queries, you can do so here or in a config file.
- # By default, queries listed here will override any specified in a config file.
- # Prefix the list here with "+" to use these queries and those in the config file.
-
- # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
- queries: +security-and-quality
-
-
- # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
- # If this step fails, then you should remove it and run the build manually (see below)
- - name: Autobuild
- uses: github/codeql-action/autobuild@v3
-
- # âšī¸ Command-line programs to run using the OS shell.
- # đ See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
-
- # If the Autobuild fails above, remove it and uncomment the following three lines.
- # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
-
- # - run: |
- # echo "Run, Build Application using script"
- # ./location_of_script_within_repo/buildscript.sh
-
- - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v3
- with:
- category: "/language:${{matrix.language}}"
\ No newline at end of file
diff --git a/.github/workflows/contributor-build.yml b/.github/workflows/contributor-build.yml
deleted file mode 100644
index 8d393ce5cfdb..000000000000
--- a/.github/workflows/contributor-build.yml
+++ /dev/null
@@ -1,126 +0,0 @@
-# The main CI of Hibernate ORM is https://ci.hibernate.org/job/hibernate-orm-pipeline/.
-# However, Hibernate ORM builds run on GitHub actions regularly
-# to check that it still works and can be used in GitHub forks.
-# See https://docs.github.com/en/free-pro-team@latest/actions
-# for more information about GitHub actions.
-
-name: Hibernate ORM build
-
-on:
- push:
- branches:
- - 'main'
- # WARNING: Using pull_request_target to access secrets, but we check out the PR head commit.
- # See checkout action for details.
- pull_request_target:
- branches:
- - 'main'
-
-permissions: {} # none
-
-# See https://github.com/hibernate/hibernate-orm/pull/4615 for a description of the behavior we're getting.
-concurrency:
- # Consider that two builds are in the same concurrency group (cannot run concurrently)
- # if they use the same workflow and are about the same branch ("ref") or pull request.
- group: "workflow = ${{ github.workflow }}, ref = ${{ github.event.ref }}, pr = ${{ github.event.pull_request.id }}"
- # Cancel previous builds in the same concurrency group even if they are in process
- # for pull requests or pushes to forks (not the upstream repository).
- cancel-in-progress: ${{ github.event_name == 'pull_request_target' || github.repository != 'hibernate/hibernate-orm' }}
-
-jobs:
- build:
- permissions:
- contents: read
- name: Java 11
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- include:
- - rdbms: h2
- - rdbms: hsqldb
- - rdbms: derby
- - rdbms: mysql
- - rdbms: mariadb
- - rdbms: postgresql
- - rdbms: edb
- - rdbms: oracle
- - rdbms: db2
- - rdbms: mssql
- - rdbms: sybase
-# Running with CockroachDB requires at least 2-4 vCPUs, which we don't have on GH Actions runners
-# - rdbms: cockroachdb
-# Running with HANA requires at least 8GB memory just for the database, which we don't have on GH Actions runners
-# - rdbms: hana
- steps:
- - name: Check out commit already pushed to branch
- if: "! github.event.pull_request.number"
- uses: actions/checkout@v4
- with:
- persist-credentials: false
- - name: Check out PR head
- uses: actions/checkout@v4
- if: github.event.pull_request.number
- with:
- # WARNING: This is potentially dangerous since we're checking out unreviewed code,
- # and since we're using the pull_request_target event we can use secrets.
- # Thus, we must be extra careful to never expose secrets to steps that execute this code,
- # and to strictly limit our of secrets to those that only pose minor security threats.
- # This means in particular we won't expose Develocity credentials to the main gradle executions,
- # but instead will execute gradle a second time just to push build scans to Develocity;
- # see below.
- ref: "refs/pull/${{ github.event.pull_request.number }}/head"
- persist-credentials: false
- - name: Reclaim Disk Space
- run: .github/ci-prerequisites.sh
- - name: Start database
- env:
- RDBMS: ${{ matrix.rdbms }}
- run: ci/database-start.sh
- - name: Set up Java 11
- uses: actions/setup-java@v4
- with:
- distribution: 'temurin'
- java-version: '11'
- - name: Get year/month for cache key
- id: get-date
- run: echo "yearmonth=$(/bin/date -u "+%Y-%m")" >> $GITHUB_OUTPUT
- shell: bash
- - name: Cache Maven local repository
- uses: actions/cache@v4
- id: cache-maven
- with:
- path: |
- ~/.m2/repository
- ~/.gradle/caches/
- ~/.gradle/wrapper/
- # refresh cache every month to avoid unlimited growth
- key: maven-localrepo-${{ steps.get-date.outputs.yearmonth }}
- - name: Run build script
- env:
- RDBMS: ${{ matrix.rdbms }}
- # Don't populate Develocity cache in pull requests as that's potentially dangerous
- POPULATE_REMOTE_GRADLE_CACHE: "${{ github.event_name == 'push' }}"
- # WARNING: exposes secrets, so must only be passed to a step that doesn't run unapproved code.
- DEVELOCITY_ACCESS_KEY: "${{ github.event_name == 'push' && secrets.GRADLE_ENTERPRISE_ACCESS_KEY || '' }}"
- run: ./ci/build-github.sh
- shell: bash
- - name: Publish Develocity build scan for previous build (pull request)
- # Don't fail a build if publishing fails
- continue-on-error: true
- if: "${{ !cancelled() && github.event_name == 'pull_request_target' && github.repository == 'hibernate/hibernate-orm' }}"
- run: |
- ./gradlew buildScanPublishPrevious
- env:
- # WARNING: exposes secrets, so must only be passed to a step that doesn't run unapproved code.
- DEVELOCITY_ACCESS_KEY: ${{ secrets.GRADLE_ENTERPRISE_ACCESS_KEY_PR }}
- - name: Upload test reports (if Gradle failed)
- uses: actions/upload-artifact@v4
- if: failure()
- with:
- name: test-reports-java11-${{ matrix.rdbms }}
- path: |
- ./**/target/reports/tests/
- ./**/target/reports/checkstyle/
- - name: Omit produced artifacts from build cache
- run: ./ci/before-cache.sh
diff --git a/.gitignore b/.gitignore
index 1427882b7fdc..9c71eaa8514d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -52,3 +52,9 @@ databases/postgis/
# Vim
*.swp
*.swo
+
+# SDKman, used by some module maintainers
+.sdkmanrc
+
+# Sonar CLI local scan files:
+.scannerwork
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b4f6d3a6ab24..57b4caddbd1a 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -51,6 +51,7 @@ GitHub there are a few pre-requisite steps to follow:
the linked page, this also includes:
* [set up your local git install](https://help.github.com/articles/set-up-git)
* clone your fork
+* Instruct git to ignore certain commits when using `git blame`. From the directory of your local clone, run this: `git config blame.ignoreRevsFile .git-blame-ignore-revs`
* See the wiki pages for setting up your IDE, whether you use
[IntelliJ IDEA](https://hibernate.org/community/contribute/intellij-idea/)
or [Eclipse](https://hibernate.org/community/contribute/eclipse-ide/)(1).
diff --git a/Jenkinsfile b/Jenkinsfile
index 9b6d025b00c9..df4ad0baf7c6 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -28,49 +28,20 @@ stage('Configure') {
requireApprovalForPullRequest 'hibernate'
this.environments = [
-// new BuildEnvironment( dbName: 'h2' ),
-// new BuildEnvironment( dbName: 'hsqldb' ),
-// new BuildEnvironment( dbName: 'derby' ),
-// new BuildEnvironment( dbName: 'mysql' ),
-// new BuildEnvironment( dbName: 'mariadb' ),
-// new BuildEnvironment( dbName: 'postgresql' ),
-// new BuildEnvironment( dbName: 'edb' ),
-// new BuildEnvironment( dbName: 'oracle' ),
-// new BuildEnvironment( dbName: 'db2' ),
-// new BuildEnvironment( dbName: 'mssql' ),
-// new BuildEnvironment( dbName: 'sybase' ),
-// Don't build with HANA by default, but only do it nightly until we receive a 3rd instance
-// new BuildEnvironment( dbName: 'hana_cloud', dbLockableResource: 'hana-cloud', dbLockResourceAsHost: true ),
new BuildEnvironment( node: 's390x' ),
- new BuildEnvironment( dbName: 'tidb', node: 'tidb',
- notificationRecipients: 'tidb_hibernate@pingcap.com' ),
+ new BuildEnvironment( dbName: 'sybase_jconn' ),
new BuildEnvironment( testJdkVersion: '17' ),
+ new BuildEnvironment( testJdkVersion: '21' ),
// We want to enable preview features when testing newer builds of OpenJDK:
// even if we don't use these features, just enabling them can cause side effects
// and it's useful to test that.
- new BuildEnvironment( testJdkVersion: '20', testJdkLauncherArgs: '--enable-preview' ),
- new BuildEnvironment( testJdkVersion: '21', testJdkLauncherArgs: '--enable-preview' ),
- new BuildEnvironment( testJdkVersion: '22', testJdkLauncherArgs: '--enable-preview' ),
+ new BuildEnvironment( testJdkVersion: '25', testJdkLauncherArgs: '--enable-preview' )
// The following JDKs aren't supported by Hibernate ORM out-of-the box yet:
// they require the use of -Dnet.bytebuddy.experimental=true.
// Make sure to remove that argument as soon as possible
// -- generally that requires upgrading bytebuddy after the JDK goes GA.
- new BuildEnvironment( testJdkVersion: '23', testJdkLauncherArgs: '--enable-preview -Dnet.bytebuddy.experimental=true' ),
- new BuildEnvironment( testJdkVersion: '24', testJdkLauncherArgs: '--enable-preview -Dnet.bytebuddy.experimental=true' )
];
- if ( env.CHANGE_ID ) {
- if ( pullRequest.labels.contains( 'cockroachdb' ) ) {
- this.environments.add( new BuildEnvironment( dbName: 'cockroachdb', node: 'cockroachdb', longRunning: true ) )
- }
- if ( pullRequest.labels.contains( 'hana' ) ) {
- this.environments.add( new BuildEnvironment( dbName: 'hana_cloud', dbLockableResource: 'hana-cloud', dbLockResourceAsHost: true ) )
- }
- if ( pullRequest.labels.contains( 'sybase' ) ) {
- this.environments.add( new BuildEnvironment( dbName: 'sybase_jconn' ) )
- }
- }
-
helper.configure {
file 'job-configuration.yaml'
// We don't require the following, but the build helper plugin apparently does
@@ -98,15 +69,17 @@ if (currentBuild.getBuildCauses().toString().contains('BranchIndexingCause')) {
currentBuild.result = 'NOT_BUILT'
return
}
+// This is a limited maintenance branch, so don't run this on pushes to the branch, only on PRs
+if ( !env.CHANGE_ID ) {
+	print "INFO: Build skipped because this job should only run for pull requests, not for branch pushes"
+ currentBuild.result = 'NOT_BUILT'
+ return
+}
stage('Build') {
Map executions = [:]
Map> state = [:]
environments.each { BuildEnvironment buildEnv ->
- // Don't build environments for newer JDKs when this is a PR
- if ( helper.scmSource.pullRequest && buildEnv.testJdkVersion ) {
- return
- }
state[buildEnv.tag] = [:]
executions.put(buildEnv.tag, {
runBuildOnNode(buildEnv.node ?: NODE_PATTERN_BASE) {
@@ -144,16 +117,12 @@ stage('Build') {
state[buildEnv.tag]['containerName'] = "edb"
break;
case "sybase_jconn":
- docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
- docker.image('nguoianphu/docker-sybase').pull()
- }
+ docker.image('nguoianphu/docker-sybase').pull()
sh "./docker_db.sh sybase"
state[buildEnv.tag]['containerName'] = "sybase"
break;
case "cockroachdb":
- docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
- docker.image('cockroachdb/cockroach:v23.1.12').pull()
- }
+ docker.image('cockroachdb/cockroach:v23.1.12').pull()
sh "./docker_db.sh cockroachdb"
state[buildEnv.tag]['containerName'] = "cockroach"
break;
@@ -199,6 +168,9 @@ stage('Build') {
}
})
}
+ executions.put('Hibernate Search Update Dependency', {
+ build job: '/hibernate-search-dependency-update/7.2', propagate: true, parameters: [string(name: 'UPDATE_JOB', value: 'orm6.6'), string(name: 'ORM_REPOSITORY', value: helper.scmSource.remoteUrl), string(name: 'ORM_PULL_REQUEST_ID', value: helper.scmSource.pullRequest.id)]
+ })
parallel(executions)
}
@@ -242,7 +214,7 @@ void ciBuild(buildEnv, String args) {
// On untrusted nodes, we use the same access key as for PRs:
// it has limited access, essentially it can only push build scans.
- def develocityCredentialsId = buildEnv.node ? 'ge.hibernate.org-access-key-pr' : 'ge.hibernate.org-access-key'
+ def develocityCredentialsId = buildEnv.node ? 'develocity.commonhaus.dev-access-key-pr' : 'develocity.commonhaus.dev-access-key'
withCredentials([string(credentialsId: develocityCredentialsId,
variable: 'DEVELOCITY_ACCESS_KEY')]) {
@@ -251,13 +223,13 @@ void ciBuild(buildEnv, String args) {
}
}
}
- else if ( buildEnv.node && buildEnv.node != 's390x' ) { // We couldn't get the code below to work on s390x for some reason.
+ else if ( buildEnv.node != 's390x' ) { // We couldn't get the code below to work on s390x for some reason.
// Pull request: we can't pass credentials to the build, since we'd be exposing secrets to e.g. tests.
// We do the build first, then publish the build scan separately.
tryFinally({
sh "./ci/build.sh $args"
}, { // Finally
- withCredentials([string(credentialsId: 'ge.hibernate.org-access-key-pr',
+ withCredentials([string(credentialsId: 'develocity.commonhaus.dev-access-key-pr',
variable: 'DEVELOCITY_ACCESS_KEY')]) {
withGradle { // withDevelocity, actually: https://plugins.jenkins.io/gradle/#plugin-content-capturing-build-scans-from-jenkins-pipeline
// Don't fail a build if publishing fails
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 87dddb4601ca..4192fecff530 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -89,8 +89,8 @@ In any case, before the release:
#### Performing the release
Once you trigger the CI job, it automatically pushes artifacts to the
-[OSSRH Maven Repository](https://repo1.maven.org/maven2/org/hibernate/orm/),
-and the documentation to [docs.jboss.org](https://docs.jboss.org/hibernate/orm/).
+[Maven Central Repository](https://repo1.maven.org/maven2/org/hibernate/orm/),
+and the documentation to [docs.hibernate.org](https://docs.hibernate.org/orm/).
* Do *not* mark the Jira Release as "released" or close issues,
the release job does it for you.
diff --git a/README.adoc b/README.adoc
index 9392eca52045..a1fbc80d7119 100644
--- a/README.adoc
+++ b/README.adoc
@@ -5,7 +5,7 @@ Hibernate implements JPA, the standard API for object/relational persistence in
See https://hibernate.org/orm/[Hibernate.org] for more information.
image:https://ci.hibernate.org/job/hibernate-orm-pipeline/job/main/badge/icon[Build Status,link=https://ci.hibernate.org/job/hibernate-orm-pipeline/job/main/]
-image:https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A[link=https://ge.hibernate.org/scans]
+image:https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A[link=https://develocity.commonhaus.dev/scans]
== Continuous Integration
diff --git a/build.gradle b/build.gradle
index ae7a4284a399..2dd27c9270bf 100644
--- a/build.gradle
+++ b/build.gradle
@@ -28,8 +28,6 @@ plugins {
id 'org.checkerframework' version '0.6.40'
id 'org.hibernate.orm.build.jdks'
- id 'io.github.gradle-nexus.publish-plugin' version '1.1.0'
-
id 'idea'
id 'org.jetbrains.gradle.plugin.idea-ext' version '1.0'
id 'eclipse'
@@ -42,77 +40,16 @@ apply from: file( 'gradle/module.gradle' )
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Release Task
-task release {
- description = "The task performed when we are performing a release build. Relies on " +
- "the fact that subprojects will appropriately define a release task " +
- "themselves if they have any release-related activities to perform"
-
- doFirst {
- def javaVersionsInUse = jdkVersions.allVersions
- if ( javaVersionsInUse != [JavaLanguageVersion.of( 11 )].toSet() ) {
- throw new IllegalStateException( "Please use JDK 11 to perform the release. Currently using: ${javaVersionsInUse}" )
- }
- }
-}
-
-task publish {
+tasks.register('publish') {
description = "The task performed when we want to just publish maven artifacts. Relies on " +
"the fact that subprojects will appropriately define a release task " +
"themselves if they have any publish-related activities to perform"
}
-ext {
- if ( project.hasProperty( 'hibernatePublishUsername' ) ) {
- if ( ! project.hasProperty( 'hibernatePublishPassword' ) ) {
- throw new GradleException( "Should specify both `hibernatePublishUsername` and `hibernatePublishPassword` as project properties" );
- }
- }
-}
-
-nexusPublishing {
- repositories {
- sonatype {
- username = project.hasProperty( 'hibernatePublishUsername' ) ? project.property( 'hibernatePublishUsername' ) : null
- password = project.hasProperty( 'hibernatePublishPassword' ) ? project.property( 'hibernatePublishPassword' ) : null
- }
- }
-}
-
-gradle.taskGraph.addTaskExecutionGraphListener(
- new TaskExecutionGraphListener() {
- @Override
- void graphPopulated(TaskExecutionGraph graph) {
- String[] tasksToLookFor = [
- 'publish',
- 'publishToSonatype',
- 'publishAllPublicationsToSonatype',
- 'publishPublishedArtifactsPublicationToSonatypeRepository',
- 'publishRelocationArtifactsPublicationToSonatypeRepository',
- ]
-
- for ( String taskToLookFor : tasksToLookFor ) {
- if ( graph.hasTask( taskToLookFor ) ) {
- // trying to publish - make sure the needed credentials are available
-
- if ( project.property( 'hibernatePublishUsername' ) == null ) {
- throw new RuntimeException( "`-PhibernatePublishUsername=...` not found" )
- }
- if ( project.property( 'hibernatePublishPassword' ) == null ) {
- throw new RuntimeException( "`-PhibernatePublishPassword=...` not found" )
- }
-
- break;
- }
- }
- }
- }
-)
-
-
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// CI Build Task
-task ciBuild {
+tasks.register('ciBuild') {
description = "The task performed when one of the 'main' jobs are triggered on the " +
"CI server. Just as above, relies on the fact that subprojects will " +
"appropriately define a release task themselves if they have any tasks " +
@@ -140,5 +77,20 @@ idea {
}
}
+tasks.register('copyDependenciesSonar', Copy) {
+ description = "Aggregates all runtime dependencies for Sonar CLI analysis."
+ def targetProjects = subprojects.findAll { it.name != 'reporting' }
+ targetProjects.each { sub ->
+ evaluationDependsOn(sub.path)
+
+ if (sub.plugins.hasPlugin('java')) {
+ from(sub.configurations.runtimeClasspath)
+ }
+ }
+
+ into(layout.buildDirectory.dir("sonar-dependencies"))
+
+ duplicatesStrategy = DuplicatesStrategy.EXCLUDE
+}
diff --git a/changelog.txt b/changelog.txt
index 06f70064586b..a1bc53b4c35a 100644
--- a/changelog.txt
+++ b/changelog.txt
@@ -1,8 +1,713 @@
Hibernate 6 Changelog
=======================
+Changes in 6.6.46.Final (March 30, 2026)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/38385
+
+
+** Bug
+ * HHH-20209 Race Condition in JavaTypeRegistry causing SemanticException during parallel UNION queries with projection.
+ * HHH-19429 ConcurrentModificationException observed while executing JPQL update query with VERSIONED clause
+
+Changes in 6.6.45.Final (March 22, 2026)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/38115
+
+
+** Bug
+ * HHH-20253 ClassCastException when using hibernate-enhance-maven-plugin plugin
+
+** Task
+ * HHH-20232 Update c3p0 to 0.12.0
+
+Changes in 6.6.44.Final (March 01, 2026)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/37977
+
+
+** Bug
+ * HHH-20176 Native Query cache causing ArrayIndexOutOfBoundsException with extra columns
+
+Changes in 6.6.43.Final (February 22, 2026)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/37572
+
+
+** Bug
+ * HHH-20168 Hibernate JsonWithArrayEmbeddableTest.testUpdateMultipleAggregateMembers test fail with Oracle 19c
+ * HHH-20161 Hibernate ArrayToStringWithArrayAggregateTest test fail with Oracle 19c
+
+Changes in 6.6.42.Final (February 01, 2026)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/37272
+
+
+** Bug
+ * HHH-20118 Vector operator SQL templates miss parenthesis around
+ * HHH-20069 `DB2iDialect.rowId` causes an error in merge queries
+
+Changes in 6.6.41.Final (January 18, 2026)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/37040
+
+
+** Bug
+ * HHH-20041 DB2 for z IN tuple list predicate performs badly
+ * HHH-20040 DB2iSqlAstTranslator using future versions for feature activation
+ * HHH-19929 DB2iDialect problem with supportsRowValueConstructorSyntaxInInSubQuery
+
+Changes in 6.6.40.Final (December 21, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/36905
+
+
+** Bug
+ * HHH-20008 Bad performance of MEMBER OF translation
+
+** Task
+ * HHH-19548 Upgrade to ByteBuddy 1.17.5
+
+Changes in 6.6.39.Final (December 14, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/36641
+
+
+** Bug
+ * HHH-19963 Wrong references in entity fields with circular associations
+ * HHH-19746 JPA parameters might be incorrectly handled resulting in exceptions or queries returning incorrect results
+
+** Improvement
+ * HHH-19943 Comparison of generic nested EmbeddedId's fails for JPQL and Criteria API
+
+Changes in 6.6.38.Final (November 30, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/36410
+
+
+** Bug
+ * HHH-19905 Implicit join re-use with nested inner and left joins causes ParsingException
+ * HHH-19883 JOIN TREAT ignores predicates
+ * HHH-18871 Nested NativeQuery mappings causing 'Could not locate TableGroup' exception after migration
+
+Changes in 6.6.37.Final (November 24, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/36080
+
+
+** Bug
+ * HHH-19936 Parameter casts for by-id lookups don't take column definition into account
+ * HHH-19926 NullPointerException when executing JPQL IN clause with null parameter on entity association
+ * HHH-19922 org.hibernate.orm:hibernate-platform:pom:7.1.7.Final is missing
+ * HHH-19910 EntityInitializer#resolveInstance wrongly initializes existing detached instance
+ * HHH-19038 Hibernate.get does not work on detached entities
+ * HHH-16991 EnhancedUserType cannot be used when defining relations
+
+Changes in 6.6.36.Final (November 16, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/36046
+
+
+** Bug
+ * HHH-19918 Avoid reflection when instantiating known FormatMapper
+ * HHH-19758 HQL parse failure with SLL can lead to wrong parse
+ * HHH-19240 Significant increase in heap allocation for queries after migrating Hibernate ORM 6.5 to 6.6
+
+Changes in 6.6.35.Final (November 13, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/35782
+
+
+** Bug
+ * HHH-19895 hibernate-core 6.6.30.Final breaks compatibility on entities with composite keys for multiple variants of DB2
+ * HHH-19888 FetchPlusOffsetParameterBinder fails to apply static offset for root pagination
+ * HHH-19887 Wrong ClassLoader used for Jackson Module discovery
+ * HHH-19739 Exceptions during load of entity with different persistent fields with same name
+
Note: Please refer to JIRA to learn more about each issue.
+Changes in 6.6.34.Final (October 27, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/35683
+
+** Task
+ * [HHH-19889] - Mention third-party dialects throughout the documentation
+ * [HHH-19869] - Regroup all dialect information under a single guide
+
+
+Changes in 6.6.33.Final (October 10, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/35651
+
+** Bug
+ * [HHH-19853] - Gradle plugin org.hibernate.orm not published for 6.6.31 / 6.2.47
+
+
+Changes in 6.6.32.Final (October 09, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/35485
+
+** Bug
+ * [HHH-19853] - Gradle plugin org.hibernate.orm not published for 6.6.31 / 6.2.47
+
+
+Changes in 6.6.31.Final (October 03, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/35420
+
+** Bug
+ * [HHH-18885] - ClassCastException with queued persist in an extra lazy Map
+ * [HHH-18936] - remove parent with @OnDelete(CASCADE) leads to TransientObjectException
+
+** Task
+ * [HHH-19800] - Migrate to release scripts for documentation publishing
+
+
+Changes in 6.6.30.Final (September 29, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/35320
+
+** Bug
+ * [HHH-19784] - Bytecode enhancement generates wrong field access method for classes in different JARs but with same package name deployed in the same EAR
+ * [HHH-19768] - Wrong supportsRowValueConstructorSyntaxInInSubQuery leads to bad performing queries
+ * [HHH-19681] - AssertionError on extract results from array containing jsonb values
+
+
+Changes in 6.6.29.Final (September 14, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/35156
+
+** Bug
+ * [HHH-19756] - Invalid SQL generated when using treat() with joined = discriminator inheritance and same attribute names
+ * [HHH-19747] - Hibernate Envers can not handle @EnumeratedValue annotation
+ * [HHH-19201] - BlobProxy with InputStream reads whole stream into a byte array
+
+
+Changes in 6.6.28.Final (August 31, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/35057
+
+** Bug
+ * [HHH-19740] - Collection table deletion for table per class subclass entity fails with UnknownTableReferenceException
+ * [HHH-19738] - JDBC password logged when specified via jakarta.persistence.jdbc.password
+ * [HHH-19734] - Cache hit of bytecode enhanced proxy with shallow query cache layout fails
+ * [HHH-19648] - Recursive @Embeddable mapping leads to stack overflow
+
+
+Changes in 6.6.27.Final (August 24, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/34957
+
+** Bug
+ * [HHH-19719] - org.hibernate.query.sqm.function.SelfRenderingSqmWindowFunction#appendHqlString throws IndexOutOfBoundsException when has no arguments
+ * [HHH-19712] - Column deduplication leads to wrong alias calculation for native query alias expansion
+ * [HHH-19687] - Criteria query with lazy @OneToOne and @EmbeddedId throws exception
+
+
+Changes in 6.6.26.Final (August 17, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/34859
+
+** Bug
+ * [HHH-18968] - MySQLDialect wrongly uses Timestamp as type for localtime function
+ * [HHH-18621] - Hibernate 6 disregards hibernate.jdbc.batch_versioned_data
+
+
+Changes in 6.6.25.Final (August 10, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/34660
+
+** Bug
+ * [HHH-19453] - sequence support not working on db2 As400 7.3
+ * [HHH-17522] - Support correlation of CTEs
+
+
+Changes in 6.6.24.Final (August 03, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/34495
+
+** Bug
+ * [HHH-19675] - JdbcTypeRegistry#hasRegisteredDescriptor should account for constructed types
+ * [HHH-19657] - OffsetDateTime in an array is handled incorrectly with setting hibernate.type.java_time_use_direct_jdbc=true
+ * [HHH-19655] - Connection leak with PhysicalConnectionHandlingMode.IMMEDIATE_ACQUISITION_AND_HOLD
+ * [HHH-19651] - DB2 for i detection not working reliably
+ * [HHH-19585] - Object relationship mapping issues | java.lang.NullPointerException: Cannot invoke "java.lang.Comparable.compareTo(Object)" because "one" is null
+ * [HHH-19579] - Criteria update join - Column 'code' in SET is ambiguous
+ * [HHH-19031] - Loading an Entity a second time when it contains an embedded object causes IllegalArgumentException
+ * [HHH-18981] - IndexOutOfBoundsException when using arrayToString with arrayAgg
+ * [HHH-18956] - Native query brace replacement breaks dollar quoted literals in 6.6+
+ * [HHH-18780] - Performance regression on Postgres with polymorphic query due to incorrect casts of null columns
+
+
+Changes in 6.6.23.Final (July 27, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/34397
+
+** Bug
+ * [HHH-19575] - empty Struct should be fetched intact or not null
+ * [HHH-19524] - @OneToOne relationship unnecessary joins in nativeQuery
+ * [HHH-19261] - OracleDialect getQueryHintString incorrectly joins supplied hints
+ * [HHH-16253] - [Envers] Schema Validation Failure With Audited (N)Clob Column with Hibernate 6 on H2
+
+** Task
+ * [HHH-19639] - Add getter for EmbeddableFetchImpl#nullIndicatorResult
+
+
+Changes in 6.6.22.Final (July 20, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/34297
+
+** Bug
+ * [HHH-19621] - SUBSTRING function for DB2i Series is broken
+ * [HHH-19579] - Criteria update join - Column 'code' in SET is ambiguous
+ * [HHH-19550] - Attribute join on correlated from node receives wrong root
+ * [HHH-19524] - @OneToOne relationship unnecessary joins in nativeQuery
+ * [HHH-19457] - Inheritance with type JOINED not working in a related entity
+ * [HHH-19368] - Group by and single-table inheritance sub-select query error
+ * [HHH-19031] - Loading an Entity a second time when it contains an embedded object causes IllegalArgumentException
+
+** Task
+ * [HHH-19624] - Test EDB with the EDB drivers
+
+
+Changes in 6.6.21.Final (July 13, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/34199
+
+** Bug
+ * [HHH-19596] - NPE when array/collection of Struct contains null value
+ * [HHH-19542] - Embeddable in secondary table fails to recognize a nested embeddable is in the same table
+
+
+Changes in 6.6.20.Final (July 06, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/34165
+
+** Bug
+ * [HHH-19464] - Storing a binary data into BLOB on Oracle cutting off its content.
+ * [HHH-19396] - Cannot select the same column twice (with different aliases) while using CTE
+ * [HHH-19076] - expecting IdClass mapping sessionfactory error with specific @IdClass setup with inheritence
+ * [HHH-18898] - Specific mistake in HQL gives NullPointerException in AbstractSqlAstTranslator
+ * [HHH-18837] - Oracle epoch extraction doesn't work with dates
+ * [HHH-18581] - Performance degradation from Hibernate 5 to 6 on NativeQuery
+
+** Improvement
+ * [HHH-19558] - allow JDBC escapes in native SQL queries
+
+
+Changes in 6.6.19.Final (June 29, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/33968
+
+** Bug
+ * [HHH-18837] - Oracle epoch extraction doesn't work with dates
+ * [HHH-19547] - Misleading exception message at DefaultFlushEntityEventListener - mangled ID - misplaced Entity and EntityEntry ID
+ * [HHH-19560] - TupleTransformer and ResultListTransformer trash the query interpretation cache
+ * [HHH-19571] - CloningPropertyCall causes non-deterministic bytecode for AccessOptimizer
+ * [HHH-19573] - Presence of wrapper byte array pollutes BasicTypeRegistry
+ * [HHH-19577] - BytecodeProviderImpl.SetPropertyValues wrongly emits duplicate stack map frames
+
+
+Changes in 6.6.18.Final (June 13, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/33703
+
+** Bug
+ * [HHH-19533] - Implement equals() and hashCode() for NativeQueryConstructorTransformer
+ * [HHH-19529] - Check bytecode generated classes with stable names class loaders
+ * [HHH-18891] - java.lang.AssertionError generated in getResolvedInstance even though NotFound IGNORE set
+ * [HHH-18876] - ArrayInitializer#resolveInstanceSubInitializers should consider @ListIndexBase
+ * [HHH-18771] - ListInitializer should consistently consider @ListIndexBase
+
+
+Changes in 6.6.17.Final (May 28, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/33638
+
+** Bug
+ * [HHH-19490] - NPE when using array_position function
+ * [HHH-19476] - claimEntityHolderIfPossible Assertion Error
+ * [HHH-19387] - AssertionError in EntityInitializerImpl data.concreteDescriptor is null
+ * [HHH-18946] - Startup issues with HANA in failover situations
+
+
+Changes in 6.6.16.Final (May 25, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/33606
+
+** Bug
+ * [HHH-19477] - ConnectionReleaseMode.AFTER_STATEMENT ineffective due to missing connection release
+ * [HHH-19473] - Bytecode enhancement incorrectly generates code for Bidirectional Generic Entities
+ * [HHH-19472] - Native query "SELECT 1" with result type Object[] return singular object
+ * [HHH-19387] - AssertionError in EntityInitializerImpl data.concreteDescriptor is null
+ * [HHH-19372] - AccessOptimizer.setPropertyValues() and getPropertyValues() error with entity hierarchy.
+ * [HHH-19369] - Entity hierarchy ordering error within metamodel building with enhancement
+ * [HHH-19207] - JPA OrderBy annotated relation not ordered when using entity graph with criteria api
+ * [HHH-18813] - DML update of secondary table column fails
+
+
+Changes in 6.6.15.Final (May 13, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/33406
+
+** Bug
+ * [HHH-19458] - problem starting MockSessionFactory from Quarkus dev mode with Reactive
+
+
+Changes in 6.6.14.Final (May 11, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/33241
+
+** Bug
+ * [HHH-19425] - incorrect class literals in Processor-generated code
+ * [HHH-19375] - fix check for presence of Quarkus in reactive case
+ * [HHH-19374] - repositories should always be @Dependent
+ * [HHH-19320] - Assigned id value is not passed into BeforeExecutionGenerator#generate() method when allowAssignedIdentifiers() is true and id has been assigned
+ * [HHH-19314] - StackOverflowException when using onConflict with createCriteriaInsertValues and createCriteriaInsertSelect
+ * [HHH-19306] - Composite generator may not respect the event types of generators it consists of
+ * [HHH-19036] - ORM Filter are searched for in the wrong order
+
+
+Changes in 6.6.13.Final (April 06, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/33142
+
+** Bug
+ * [HHH-19314] - StackOverflowException when using onConflict with createCriteriaInsertValues and createCriteriaInsertSelect
+
+
+Changes in 6.6.12.Final (March 30, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32945
+
+** Bug
+ * [HHH-19109] - Hibernate Data Repositories are @RequestScoped
+ * [HHH-19059] - Bytecode enhancement fails when inherited fields are mapped using property access in subclass
+ * [HHH-19017] - Class Cast Exception for PersistentAttributeInterceptable
+ * [HHH-18920] - Enum parameters in Jakarta Data repository method return type constructor are not properly matched
+ * [HHH-18745] - Unnecessary joins when use TREAT operator
+ * [HHH-14694] - Use stable proxy names to avoid managing proxy state and memory leaks
+
+** Task
+ * [HHH-19230] - Ensure that thread local for org.hibernate.bytecode.enhance.internal.bytebuddy.SafeCacheProvider + OverridingClassFileLocator are completely cleared
+
+
+Changes in 6.6.11.Final (March 16, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32879
+
+** Bug
+ * [HHH-19246] - Fetch join makes partially covered EntityGraph ineffective
+ * [HHH-19220] - ClassCastException: class org.hibernate.bytecode.enhance.spi.LazyPropertyInitializer$1 cannot be cast to class java.lang.String
+ * [HHH-19140] - Enhanced entities with AccessType.PROPERTY does not work well with inheritance
+ * [HHH-19107] - Entities with @EmbeddedId not supported with CrudRepository
+ * [HHH-19106] - @Transaction(TxType) not working with Hibernate Data Repositories
+ * [HHH-19052] - Hibernate 6.6.X regression with join formula
+ * [HHH-18894] - Hibernate 6.6 enum literal is considered field literal instead
+ * [HHH-18881] - In MySQL, array of dates are not converted correctly
+ * [HHH-18858] - array fields and static metamodel
+ * [HHH-18787] - Custom UserType not recognised for array properties
+ * [HHH-18570] - Invalid SQL when filter contains identifier named date
+
+
+Changes in 6.6.10.Final (March 09, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32647
+
+** Bug
+ * [HHH-19232] - BeanValidationEventListener not called if only associated collection is updated via getter
+ * [HHH-19206] - Bytecode-enhanced dirty checking ineffective if entity's embedded ID set manually (to same value)
+ * [HHH-19195] - Embeddable inheritance: discriminator values are not hierarchically ordered
+
+** Improvement
+ * [HHH-19219] - Informix Catalog and schema support
+
+
+Changes in 6.6.9.Final (February 23, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32614
+
+** Bug
+ * [HHH-19173] - PostgreSQLLegacySqlAstTranslator does not implement visitInArrayPredicates
+ * [HHH-19116] - Error when using fk() function on left joined many-to-one association and is null predicate
+ * [HHH-19110] - Flush operation fails with "UnsupportedOperationException: compare() not implemented for EntityType"
+ * [HHH-17151] - NPE when binding null parameter in native query with explicit TemporalType
+
+
+Changes in 6.6.8.Final (February 16, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32516
+
+** Bug
+ * [HHH-19126] - Plural valued paths should be collection-typed instead of element typed
+ * [HHH-18988] - Embeddable inheritance + default_schema results in NPE at startup
+
+** Improvement
+ * [HHH-19098] - Disable implicit loading of the default import script
+
+
+Changes in 6.6.7.Final (February 10, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32416
+
+** Bug
+ * [HHH-19104] - Envers is keeping references to classes and thus classloaders
+ * [HHH-18901] - AnnotationFormatError: Duplicate annotation for class: interface org.hibernate.bytecode.enhance.spi.EnhancementInfo
+ * [HHH-18069] - NullPointerException when unioning partition results
+
+** Task
+ * [HHH-19050] - Allow configuration of EntityManagerFactoryBuilderImpl to override the BytecodeProvider instance
+ * [HHH-18928] - Consider the default Access Type as per Spec section 2.3.1 and skip enhancement of properties accessor
+
+
+Changes in 6.6.6.Final (February 02, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32382
+
+** Bug
+ * [HHH-19079] - ComponentType.replace can cause ArrayIndexOutOfBoundsException when used with embeddable inheritance
+ * [HHH-19069] - Performance regression for wide inheritance models
+ * [HHH-19034] - Wrong reuse of a Join
+ * [HHH-18961] - JtaIsolationDelegate, obtaining connection : NPE when SQLExceptionConversionDelegate#convert returns null
+ * [HHH-18933] - the ordering of the class declaration in persistence.xml seems to affect the metamodel
+
+** Task
+ * [HHH-19078] - Improve release process error message when no issues with corresponding version are found
+
+
+Changes in 6.6.5.Final (January 19, 2025)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32379
+
+** Bug
+ * [HHH-19011] - @ElementCollection comment overrides class level comment on an Entity
+ * [HHH-18819] - Error resolving persistent property of @MappedSuperclass if subtype @Embeddable used as @IdClass
+ * [HHH-17652] - Cannot invoke "org.hibernate.envers.internal.entities.EntityConfiguration.getRelationDescription(String)" because "entCfg" is null
+
+** Task
+ * [HHH-18972] - Upgrade to ByteBuddy 1.15.11
+
+
+Changes in 6.6.4.Final (December 18, 2024)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32370
+
+** Bug
+ * [HHH-18949] - Hibernate Processor should not insert underscores within uppercase names
+ * [HHH-18932] - Wrongly using FK column instead of PK when using joined alias
+ * [HHH-18912] - ORM release process
+ * [HHH-18904] - Bytecode Enhancement fails with UnsupportedEnhancementStrategy.FAIL for pre-persist method
+ * [HHH-18903] - Bytecode enhancement fails for entities that contain a method named get
+ * [HHH-18872] - ConcreteProxy type not restored from 2LC when loading a ManyToOne
+ * [HHH-18868] - Wrong behaviour of getAttribute method in impl. of ManagedType when scattered id attributes are used in MappedSuperclass
+ * [HHH-18863] - jpamodelgen 6.6 performance issue in Eclipse IDE
+ * [HHH-18850] - createCountQuery with Hibernate 6.6.2
+ * [HHH-18709] - CriteriaUpdate involving JSON field containing Map results in SemanticException
+ * [HHH-18689] - 'FULL' query cache sometimes incomplete
+ * [HHH-18629] - Inconsistent column alias generated while result class is used for placeholder
+ * [HHH-18583] - Joined + discriminator inheritance treat in where clause not restricting to subtype
+ * [HHH-18384] - @JoinColumnsOrFormulas broken
+ * [HHH-18274] - Problems with generics in queries; proposed partial solution
+ * [HHH-17838] - @OneToOne relationship + @Embeddable keys + FetchType.LAZY fail in most recent version
+ * [HHH-17612] - DefaultRevisionEntity: Illegal argument on static metamodel field injection
+ * [HHH-14725] - Using a InputStream with BlobProxy and Envers results in java.sql.SQLException: could not reset reader
+ * [HHH-13815] - TransientObjectException after merging a bidirectional one-to-many with orphan deletion
+ * [HHH-13790] - Temporary session not being closed
+ * [HHH-13377] - Lazy loaded properties of bytecode enhanced entity are left stale after refresh of entity
+
+** Sub-task
+ * [HHH-18369] - Support Informix matches() function
+ * [HHH-18367] - Informix sum on case expression error
+ * [HHH-18365] - Informix function bit_length() error
+ * [HHH-18364] - Informix function locate() error
+ * [HHH-18363] - Informix component nullness check error
+ * [HHH-18362] - Informix function substring() error
+ * [HHH-18361] - Informix current_time error
+ * [HHH-18360] - Informix function str() error
+
+** Task
+ * [HHH-18917] - Follow all of the JavaBeans rules in enhance/internal/bytebuddy/EnhancerImpl when checking if a class can be enhanced
+ * [HHH-18906] - Allow specifying UnsupportedEnhancementStrategy for Hibernate testing
+
+
+Changes in 6.6.3.Final (November 21, 2024)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32365
+
+** Bug
+ * [HHH-18862] - Group by error due to subselect using foreign key reference instead of primary key in HQL query
+ * [HHH-18851] - ArrayContainsArgumentTypeResolver wrongly infers array type for needle argument
+ * [HHH-18842] - Regression: CollectionType.replace() breaks if target is PersistentCollection, but not instance of Collection (e.g. PersistentMap)
+ * [HHH-18832] - Bytecode enhancement skipped for entities with "compute-only" @Transient properties
+ * [HHH-18816] - Error when rendering the fk-side of an association in an exists subquery
+ * [HHH-18703] - JoinedSubclassEntityPersister#getTableNameForColumn KO
+ * [HHH-18647] - SemanticException when using createCriteriaInsertValues to insert into foreign key column
+
+** Improvement
+ * [HHH-18841] - Make `_identifierMapper` property added for a IdClass synthetic
+ * [HHH-18833] - Configuration to fail bytecode enhancement instead of skipping it on unsupported models
+
+** Task
+ * [HHH-18846] - Enable release automation for ORM 6.6
+
+
+Changes in 6.6.2.Final (November 07, 2024)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32350
+
+** Bug
+ * [HHH-18773] - Multiple selections of same alias triggers possible non-threadsafe access to the session
+ * [HHH-18770] - NPE when using the JFR integration with JFR disabled
+ * [HHH-18764] - Class cast exception when using non basic type as identifier and in an embedded field using a natural ID
+ * [HHH-18761] - named query method generation for @NamedQuery on entity
+ * [HHH-18739] - Do not support join queries when using Mysql
+ * [HHH-18730] - Multi-column association in aggregate component doesn't work
+ * [HHH-18720] - Type check on select columns in union all gives SemanticException when there is a null column
+ * [HHH-18719] - Previous row state reuse can provide detached entities to the consumer
+ * [HHH-18713] - saveOrUpdate changed behaviour with bytecode enhancer
+ * [HHH-18712] - Warning about attempts to update an immutable entity for normal (not immutable) entity
+ * [HHH-18702] - Exception using @EmbeddedId with @OneToMany that refers to an alternate key column
+ * [HHH-18699] - Correctly handle @Id and @Version fields in query validation in Hibernate Processor
+ * [HHH-18697] - JPA 3.2 spec compliance for uppercasing of names in Hibernate Processor
+ * [HHH-18696] - @Find method for single @NaturalId field
+ * [HHH-18692] - Hibernate attempts to close batched statements multiple times
+ * [HHH-18681] - InterpretationException executing subquery in case-when : o.h.query.sqm.tree.select.SqmSelection.getExpressible() is null
+ * [HHH-18675] - Self-referencing many-to-many relation on generic entity gives NullPointerException in mapping
+ * [HHH-18669] - NullPointerException in the AgroalConnectionProvider
+ * [HHH-18667] - Annotation processor leaks - OOME when used in Eclipse IDE
+ * [HHH-18658] - Inner join prevents finding an entity instance referencing an empty map
+ * [HHH-18645] - AssertionError in AbstractBatchEntitySelectFetchInitializer#registerToBatchFetchQueue
+ * [HHH-18642] - DB2: select from new table with identity column not working when missing read permission
+ * [HHH-18635] - Avoid using `bigdatetime` column type on Sybase jconn when not necessary
+ * [HHH-18632] - Concurrency issue with AbstractEntityPersister#nonLazyPropertyLoadPlansByName
+ * [HHH-18631] - AssertionError when loading an entity after removing another, associated entity
+ * [HHH-18628] - Regression: Unable to determine TableReference
+ * [HHH-18617] - Fetching unowned side of bidirectional OneToOne mappings including tenant identifier triggers EntityFilteredException
+ * [HHH-18614] - TransientObjectException: session.update() does not save new entities in OneToMany relation when using bytecode enhancement
+ * [HHH-18608] - NPE in EntityInitializerImpl.resolveInstanceSubInitializers
+ * [HHH-18596] - ValueHandlingMode hack in query pagination
+ * [HHH-18582] - Mapping array of arrays with @JdbcTypeCode(SqlTypes.ARRAY) causes NPE
+ * [HHH-18575] - IN predicate with numeric/decimal parameter types leads to Binding is multi-valued; illegal call to #getBindValue
+ * [HHH-18564] - Literal expressions using AttributeConverters stopped working in hibernate 6
+ * [HHH-18551] - Memory leak caused by AbstractArrayJavaType#getRecommendedJdbcType
+ * [HHH-18515] - Unrecognized discriminator value exception when running native query on entity with discriminator column
+ * [HHH-18513] - Session Metrics for pre-partial-flushes are wrong
+ * [HHH-18500] - Gradle plugin crashes on module-info when extended enhancement is set
+ * [HHH-18494] - UnknownTableReferenceException in native query with placeholder when entity contains a to-one with a join table
+ * [HHH-18491] - Resuming null transaction in JtaIsolationDelegate
+ * [HHH-18471] - Since 6.2.2 Dialect SybaseAnywhereDialect does not render table names in the selection query
+ * [HHH-18450] - Inconsistent "SELECT 1" versus "SELECT ?1" with result type Object[]
+ * [HHH-18409] - In 6.5.2.Final, byte[] instance variables annotated with @NaturalId cannot be found with a natural ID query (regression from Hibernate 5.6.15.Final)
+ * [HHH-18389] - `merge()` with `orphanRemoval = true` leads to "HibernateException: A collection with cascade="all-delete-orphan" was no longer referenced by the owning entity instance"
+ * [HHH-18321] - SqlTreeCreationException when using sum/diff with coalesce on properties of embeddable component
+ * [HHH-18131] - Composite identifiers with associations stopped working with @IdClass
+ * [HHH-17739] - unhelpful CCE for field with unsupported collection type
+ * [HHH-16572] - Skip enhancement for PROPERTY attributes with mismatched field and method names
+
+** Improvement
+ * [HHH-18698] - respect @Nonnull annotation applied to parameters of @Find method
+ * [HHH-18654] - Change setting docs to use an asciidoc section per setting (User Guide)
+ * [HHH-18640] - Add note to migration guide about @Table and subclasses
+
+
+Changes in 6.6.1.Final (September 17, 2024)
+------------------------------------------------------------------------------------------------------------------------
+
+https://hibernate.atlassian.net/projects/HHH/versions/32340
+
+** Bug
+ * [HHH-18571] - Entities and collections with batch size 1 are treated as batchable
+ * [HHH-18565] - Bytecode enhancement, assertion error on reloading *toOne entities
+ * [HHH-18560] - DB2iDialect executes incompatible query in combination with @AuditJoinTable mapping
+ * [HHH-18558] - Informix UUID type support
+ * [HHH-18556] - Expressions.nullExpression() in querydsl result in NPE in SqmExpressible
+ * [HHH-18531] - Informix timestamp literal error
+ * [HHH-18524] - @IdClass and composite key generation not working with meta-annotated @IdGenerationType
+ * [HHH-18518] - MySQLDialect fails to detect version on azure flexible server
+ * [HHH-18511] - ArrayIndexOutOfBoundsException in ImmutableFetchList
+ * [HHH-18506] - Flush performance degradation due to itable stubs
+ * [HHH-18503] - ID select criteria query fails with joined + discriminator inheritance when entity has subtypes
+ * [HHH-18502] - Type conversion error due to surprising javac method selection in SqmSubQuery#in
+ * [HHH-18493] - Resolving already initialized collection elements leads to assertion error
+ * [HHH-18490] - Static metamodel contains wrong type for Embeddable with generic attribute extending from mapped superclass
+ * [HHH-18489] - Lazy, unowned one-to-one associations get loaded eagerly in queries - even with bytecode enhancement
+ * [HHH-18487] - Error: Detached entity with id X has an uninitialized version value '1''
+ * [HHH-18486] - Unable to add Persist the object because of EntityNameResolver being ignored
+ * [HHH-18484] - Update fails when a many-to-one property on an entity with a generated value is changed
+ * [HHH-18480] - ClassCastException when updating a Blob with Oracle
+ * [HHH-18478] - Cannot invoke "org.hibernate.persister.entity.EntityPersister.getSubclassId()" because "data.concreteDescriptor" is null when reloading entity from query cache
+ * [HHH-18476] - Fetchable scrolling with KeyToOne leads to assertion error
+ * [HHH-18472] - Fix memory issues for release job
+ * [HHH-18470] - Duplicate foreign key names generated for inheritance type TABLE_PER_CLASS
+ * [HHH-18469] - Assertion Error when fetch joining complex entities structure
+ * [HHH-18466] - Regression from 5.6.15.Final where a mutable natural IDs query cannot find an entity
+ * [HHH-18447] - cast as boolean broken in Hibernate 6
+ * [HHH-18445] - Embeddable as Java Record has wrong order of columns
+ * [HHH-18439] - NullPointerException when access data from query cache
+ * [HHH-18436] - Wrong order-by rendering order for nested collections if loaded with entity graph
+ * [HHH-18414] - Duplicated filter applied to 'find()' method
+ * [HHH-18400] - AttributeConverter causing type-check failure when comparing Date field to parameter
+ * [HHH-18353] - ArrayConstructorFunction comparing argument types by reference causes FunctionArgumentException
+ * [HHH-18337] - SequenceStyleGenerator not respecting physical naming strategy
+ * [HHH-18282] - generate error sql in case of @DiscriminatorValue("not null") and the entity is superclass of other entity
+ * [HHH-18174] - Hibernate polymorphic join in complex subquery not using discriminator
+ * [HHH-18103] - MappedSuperclass handling for Embeddable is broken in metamodel
+
+** Improvement
+ * [HHH-18625] - Add Configurable#create(GeneratorCreationContext)
+ * [HHH-18507] - allow overwriting the default db image with environment variable in docker_db.sh
+ * [HHH-18459] - Add SingleStore community dialect
+ * [HHH-17646] - JOINED @Inheritance leads to bad performance in Hibernate 6
+
+** Task
+ * [HHH-18612] - Avoid writing files to classpath in tests to avoid spurious failures
+
+
Changes in 6.6.0.Final (August 08, 2024)
------------------------------------------------------------------------------------------------------------------------
diff --git a/ci/build.sh b/ci/build.sh
index 826347a0a112..fa944dd5b82f 100755
--- a/ci/build.sh
+++ b/ci/build.sh
@@ -56,6 +56,10 @@ elif [ "$RDBMS" == "oracle_db23c" ]; then
export SERVICE=$(echo $INFO | jq -r '.database' | jq -r '.service')
# I have no idea why, but these tests don't seem to work on CI...
goal="-Pdb=oracle_cloud_db23c -DrunID=$RUNID -DdbHost=$HOST -DdbService=$SERVICE"
+# OTP
+elif [ "$RDBMS" == "autonomous-transaction-processing-serverless-19c" ] || [ "$RDBMS" == "autonomous-transaction-processing-serverless-26ai" ] || [ "$RDBMS" == "autonomous-transaction-processing-serverless" ] || [ "$RDBMS" == "base-database-service-19c" ] || [ "$RDBMS" == "base-database-service-21c" ] || [ "$RDBMS" == "base-database-service-26ai" ]; then
+ echo "Managing OTP Database..."
+ goal="-Pdb=oracle_test_pilot_database -DrunID=$RUNID -DdbPassword=$TESTPILOT_PASSWORD -DdbConnectionStringSuffix=$TESTPILOT_CONNECTION_STRING_SUFFIX"
elif [ "$RDBMS" == "db2" ]; then
goal="-Pdb=db2_ci"
elif [ "$RDBMS" == "db2_10_5" ]; then
diff --git a/ci/jpa-3.1-tck.Jenkinsfile b/ci/jpa-3.1-tck.Jenkinsfile
index 87b18513bb00..4ad1006e6824 100644
--- a/ci/jpa-3.1-tck.Jenkinsfile
+++ b/ci/jpa-3.1-tck.Jenkinsfile
@@ -1,4 +1,4 @@
-@Library('hibernate-jenkins-pipeline-helpers@1.13') _
+@Library('hibernate-jenkins-pipeline-helpers') _
// Avoid running the pipeline on branch indexing
if (currentBuild.getBuildCauses().toString().contains('BranchIndexingCause')) {
@@ -6,106 +6,103 @@ if (currentBuild.getBuildCauses().toString().contains('BranchIndexingCause')) {
currentBuild.result = 'NOT_BUILT'
return
}
-def throttleCount
-// Don't build the TCK on PRs, unless they use the tck label
-if ( env.CHANGE_ID != null ) {
- if ( !pullRequest.labels.contains( 'tck' ) ) {
- print "INFO: Build skipped because pull request doesn't have 'tck' label"
- return
- }
- throttleCount = 20
-}
-else {
- throttleCount = 1
+// This is a limited maintenance branch, so don't run this on pushes to the branch, only on PRs
+if ( !env.CHANGE_ID ) {
+ print "INFO: Build skipped because this job should only run for pull request, not for branch pushes"
+ currentBuild.result = 'NOT_BUILT'
+ return
}
pipeline {
- agent {
- label 'LongDuration'
- }
+ agent none
tools {
jdk 'OpenJDK 11 Latest'
}
options {
- rateLimitBuilds(throttle: [count: throttleCount, durationName: 'day', userBoost: true])
buildDiscarder(logRotator(numToKeepStr: '3', artifactNumToKeepStr: '3'))
disableConcurrentBuilds(abortPrevious: true)
}
parameters {
choice(name: 'IMAGE_JDK', choices: ['jdk11'], description: 'The JDK base image version to use for the TCK image.')
- string(name: 'TCK_VERSION', defaultValue: '3.1.5', description: 'The version of the Jakarta JPA TCK i.e. `2.2.0` or `3.0.1`')
- string(name: 'TCK_SHA', defaultValue: '01072e6bdf56f0f8818290b8819f492ac95bb83fab14070d36aa7158a4f5eeed', description: 'The SHA256 of the Jakarta JPA TCK that is distributed under https://download.eclipse.org/jakartaee/persistence/3.1/jakarta-persistence-tck-${TCK_VERSION}.zip.sha256')
+ string(name: 'TCK_VERSION', defaultValue: '3.1.6', description: 'The version of the Jakarta JPA TCK i.e. `2.2.0` or `3.0.1`')
+ string(name: 'TCK_SHA', defaultValue: '790ca7a2a95ea098cfedafa2689c0d7a379fa62c74fed9505dd23191292f59fe', description: 'The SHA256 of the Jakarta JPA TCK that is distributed under https://download.eclipse.org/jakartaee/persistence/3.1/jakarta-persistence-tck-${TCK_VERSION}.zip.sha256')
string(name: 'TCK_URL', defaultValue: '', description: 'The URL from which to download the TCK ZIP file. Only needed for testing staged builds. Ensure the TCK_VERSION variable matches the ZIP file name suffix.')
booleanParam(name: 'NO_SLEEP', defaultValue: true, description: 'Whether the NO_SLEEP patch should be applied to speed up the TCK execution')
}
stages {
- stage('Build') {
+ stage('Checks') {
steps {
- script {
- docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
- docker.image('openjdk:11-jdk').pull()
- }
- }
- dir('hibernate') {
- checkout scm
- sh './gradlew publishToMavenLocal -PmavenMirror=nexus-load-balancer-c4cf05fd92f43ef8.elb.us-east-1.amazonaws.com -DjakartaJpaVersion=3.1.0'
- script {
- env.HIBERNATE_VERSION = sh (
- script: "grep hibernateVersion gradle/version.properties|cut -d'=' -f2",
- returnStdout: true
- ).trim()
- }
- }
- dir('tck') {
- checkout changelog: false, poll: false, scm: [$class: 'GitSCM', branches: [[name: '*/main']], extensions: [], userRemoteConfigs: [[url: 'https://github.com/hibernate/jakarta-tck-runner.git']]]
- script {
- if ( params.TCK_URL == null || params.TCK_URL.isEmpty() ) {
- sh "cd jpa-3.1; docker build -f Dockerfile.${params.IMAGE_JDK} -t jakarta-tck-runner --build-arg TCK_VERSION=${params.TCK_VERSION} --build-arg TCK_SHA=${params.TCK_SHA} ."
- }
- else {
- sh "cd jpa-3.1; docker build -f Dockerfile.${params.IMAGE_JDK} -t jakarta-tck-runner --build-arg TCK_VERSION=${params.TCK_VERSION} --build-arg TCK_SHA=${params.TCK_SHA} --build-arg TCK_URL=${params.TCK_URL} ."
- }
- }
- }
- }
- }
- stage('Run TCK') {
- steps {
- sh """ \
- rm -Rf ./results
- docker rm -f tck || true
- docker volume rm -f tck-vol || true
- docker volume create tck-vol
- docker run -v ~/.m2/repository/org/hibernate:/root/.m2/repository/org/hibernate:z -v tck-vol:/tck/persistence-tck/tmp/:z -e NO_SLEEP=${params.NO_SLEEP} -e HIBERNATE_VERSION=$HIBERNATE_VERSION --name tck jakarta-tck-runner
- docker cp tck:/tck/persistence-tck/tmp/ ./results
- """
- archiveArtifacts artifacts: 'results/**'
- script {
- failures = sh (
- script: """ \
- set +x
- while read line; do
- if [[ "\$line" != *"Passed." ]]; then
- echo "\$line"
- fi
- done .*@${env.HIBERNATE_VERSION}@' pom.xml", returnStatus: true)
+ if ( sedStatus != 0 ) {
+ throw new IllegalArgumentException( "Unable to replace hibernate version in Quarkus pom. Got exit code $sedStatus" )
+ }
+ }
+ // Need to override the default maven configuration this way, because there is no other way to do it
+ sh "sed -i 's/-Xmx5g/-Xmx2048m/' ./.mvn/jvm.config"
+ sh "echo -e '\\n-XX:MaxMetaspaceSize=1024m'>>./.mvn/jvm.config"
+ withMaven(mavenLocalRepo: env.WORKSPACE + '/.m2repository', publisherStrategy:'EXPLICIT') {
+ sh "./mvnw -pl !docs -Dquickly install"
+ // Need to kill the gradle daemons started during the Maven install run
+ sh "sudo pkill -f '.*GradleDaemon.*' || true"
+ // Need to override the default maven configuration this way, because there is no other way to do it
+ sh "sed -i 's/-Xmx2048m/-Xmx1340m/' ./.mvn/jvm.config"
+ sh "sed -i 's/MaxMetaspaceSize=1024m/MaxMetaspaceSize=512m/' ./.mvn/jvm.config"
+ def excludes = "'!integration-tests/kafka-oauth-keycloak,!integration-tests/kafka-sasl-elytron,!integration-tests/hibernate-search-orm-opensearch,!integration-tests/maven,!integration-tests/quartz,!integration-tests/reactive-messaging-kafka,!integration-tests/resteasy-reactive-kotlin/standard,!integration-tests/opentelemetry-reactive-messaging,!integration-tests/virtual-threads/kafka-virtual-threads,!integration-tests/smallrye-jwt-oidc-webapp,!extensions/oidc-db-token-state-manager/deployment,!docs'"
+ sh "TESTCONTAINERS_RYUK_CONTAINER_PRIVILEGED=true ./mvnw -Dinsecure.repositories=WARN -pl :quarkus-hibernate-orm -amd -pl ${excludes} verify -Dstart-containers -Dtest-containers -Dskip.gradle.build"
+ }
+ }
+ }
+ }
+ }
+ }
+ post {
+ always {
+ notifyBuildResult maintainers: "andrea@hibernate.org steve@hibernate.org christian.beikov@gmail.com mbellade@redhat.com"
+ }
+ }
+}
diff --git a/ci/release/Jenkinsfile b/ci/release/Jenkinsfile
index 0304265957d2..54058f4f2a2e 100644
--- a/ci/release/Jenkinsfile
+++ b/ci/release/Jenkinsfile
@@ -9,7 +9,7 @@
/*
* See https://github.com/hibernate/hibernate-jenkins-pipeline-helpers
*/
-@Library('hibernate-jenkins-pipeline-helpers@1.17') _
+@Library('hibernate-jenkins-pipeline-helpers') _
import org.hibernate.jenkins.pipeline.helpers.version.Version
@@ -17,11 +17,10 @@ import org.hibernate.jenkins.pipeline.helpers.version.Version
// Global build configuration
env.PROJECT = "orm"
env.JIRA_KEY = "HHH"
-def RELEASE_ON_PUSH = false // Set to `true` *only* on branches where you want a release on each push.
+def RELEASE_ON_SCHEDULE = true // Set to `true` *only* on branches where you want a scheduled release.
print "INFO: env.PROJECT = ${env.PROJECT}"
print "INFO: env.JIRA_KEY = ${env.JIRA_KEY}"
-print "INFO: RELEASE_ON_PUSH = ${RELEASE_ON_PUSH}"
// --------------------------------------------
// Build conditions
@@ -34,10 +33,17 @@ if (currentBuild.getBuildCauses().toString().contains('BranchIndexingCause')) {
}
def manualRelease = currentBuild.getBuildCauses().toString().contains( 'UserIdCause' )
+def cronRelease = currentBuild.getBuildCauses().toString().contains( 'TimerTriggerCause' )
// Only do automatic release on branches where we opted in
-if ( !manualRelease && !RELEASE_ON_PUSH ) {
- print "INFO: Build skipped because automated releases are disabled on this branch. See constant RELEASE_ON_PUSH in ci/release/Jenkinsfile"
+if ( !manualRelease && !cronRelease ) {
+ print "INFO: Build skipped because automated releases on push are disabled on this branch."
+ currentBuild.result = 'NOT_BUILT'
+ return
+}
+
+if ( !manualRelease && cronRelease && !RELEASE_ON_SCHEDULE ) {
+ print "INFO: Build skipped because automated releases are disabled on this branch. See constant RELEASE_ON_SCHEDULE in ci/release/Jenkinsfile"
currentBuild.result = 'NOT_BUILT'
return
}
@@ -58,14 +64,17 @@ def checkoutReleaseScripts() {
pipeline {
agent {
- label 'Worker&&Containers'
+ label 'Release'
+ }
+ triggers {
+ // Run weekly, on Sunday at midnight
+ cron('0 0 * * 0')
}
tools {
jdk 'OpenJDK 11 Latest'
}
options {
buildDiscarder logRotator(daysToKeepStr: '30', numToKeepStr: '10')
- rateLimitBuilds(throttle: [count: 1, durationName: 'day', userBoost: true])
disableConcurrentBuilds(abortPrevious: false)
preserveStashes()
}
@@ -89,9 +98,13 @@ pipeline {
)
}
stages {
- stage('Release check') {
+ stage('Check') {
steps {
script {
+ print "INFO: params.RELEASE_VERSION = ${params.RELEASE_VERSION}"
+ print "INFO: params.DEVELOPMENT_VERSION = ${params.DEVELOPMENT_VERSION}"
+ print "INFO: params.RELEASE_DRY_RUN? = ${params.RELEASE_DRY_RUN}"
+
checkoutReleaseScripts()
def currentVersion = Version.parseDevelopmentVersion( sh(
@@ -107,7 +120,9 @@ pipeline {
echo "Release was requested manually"
if ( !params.RELEASE_VERSION ) {
- throw new IllegalArgumentException( 'Missing value for parameter RELEASE_VERSION. This parameter must be set explicitly to prevent mistakes.' )
+ throw new IllegalArgumentException(
+ 'Missing value for parameter RELEASE_VERSION. This parameter must be set explicitly to prevent mistakes.'
+ )
}
releaseVersion = Version.parseReleaseVersion( params.RELEASE_VERSION )
@@ -118,12 +133,15 @@ pipeline {
else {
echo "Release was triggered automatically"
- // Avoid doing an automatic release for commits from a release
- def lastCommitter = sh(script: 'git show -s --format=\'%an\'', returnStdout: true)
- def secondLastCommitter = sh(script: 'git show -s --format=\'%an\' HEAD~1', returnStdout: true)
- if (lastCommitter == 'Hibernate-CI' && secondLastCommitter == 'Hibernate-CI') {
- print "INFO: Automatic release skipped because last commits were for the previous release"
- currentBuild.result = 'ABORTED'
+ // Avoid doing an automatic release if there are no "releasable" commits since the last release (see release scripts for determination)
+ def releasableCommitCount = sh(
+ script: ".release/scripts/count-releasable-commits.sh ${env.PROJECT}",
+ returnStdout: true
+ ).trim().toInteger()
+ if ( releasableCommitCount <= 0 ) {
+ print "INFO: Automatic release skipped because no releasable commits were pushed since the previous release"
+ currentBuild.getRawBuild().getExecutor().interrupt(Result.NOT_BUILT)
+ sleep(1) // Interrupt is not blocking and does not take effect immediately.
return
}
@@ -148,13 +166,14 @@ pipeline {
env.RELEASE_VERSION = releaseVersion.toString()
env.DEVELOPMENT_VERSION = developmentVersion.toString()
env.SCRIPT_OPTIONS = params.RELEASE_DRY_RUN ? "-d" : ""
+ env.JRELEASER_DRY_RUN = params.RELEASE_DRY_RUN
// Determine version id to check if Jira version exists
sh ".release/scripts/determine-jira-version-id.sh ${env.JIRA_KEY} ${releaseVersion.withoutFinalQualifier}"
}
}
}
- stage('Release prepare') {
+ stage('Prepare') {
steps {
script {
checkoutReleaseScripts()
@@ -163,31 +182,24 @@ pipeline {
configFile(fileId: 'release.config.ssh', targetLocation: "${env.HOME}/.ssh/config"),
configFile(fileId: 'release.config.ssh.knownhosts', targetLocation: "${env.HOME}/.ssh/known_hosts")
]) {
- withCredentials([
- usernamePassword(credentialsId: 'ossrh.sonatype.org', passwordVariable: 'OSSRH_PASSWORD', usernameVariable: 'OSSRH_USER'),
- usernamePassword(credentialsId: 'gradle-plugin-portal-api-key', passwordVariable: 'PLUGIN_PORTAL_PASSWORD', usernameVariable: 'PLUGIN_PORTAL_USERNAME'),
- file(credentialsId: 'release.gpg.private-key', variable: 'RELEASE_GPG_PRIVATE_KEY_PATH'),
- string(credentialsId: 'release.gpg.passphrase', variable: 'RELEASE_GPG_PASSPHRASE')
- ]) {
- sshagent(['ed25519.Hibernate-CI.github.com', 'hibernate.filemgmt.jboss.org', 'hibernate-ci.frs.sourceforge.net']) {
- // set release version
- // update changelog from JIRA
- // tags the version
- // changes the version to the provided development version
- withEnv([
- "BRANCH=${env.GIT_BRANCH}",
- // Increase the amount of memory for this part since asciidoctor doc rendering consumes a lot of metaspace
- "GRADLE_OPTS=-Dorg.gradle.jvmargs='-Dlog4j2.disableJmx -Xmx4g -XX:MaxMetaspaceSize=768m -XX:+HeapDumpOnOutOfMemoryError -Duser.language=en -Duser.country=US -Duser.timezone=UTC -Dfile.encoding=UTF-8'"
- ]) {
- sh ".release/scripts/prepare-release.sh ${env.PROJECT} ${env.RELEASE_VERSION} ${env.DEVELOPMENT_VERSION}"
- }
+ sshagent(['ed25519.Hibernate-CI.github.com', 'hibernate-ci.frs.sourceforge.net']) {
+ // set release version
+ // update changelog from JIRA
+ // tags the version
+ // changes the version to the provided development version
+ withEnv([
+ "DISABLE_REMOTE_GRADLE_CACHE=true",
+ // Increase the amount of memory for this part since asciidoctor doc rendering consumes a lot of metaspace
+ "GRADLE_OPTS=-Dorg.gradle.jvmargs='-Dlog4j2.disableJmx -Xmx4g -XX:MaxMetaspaceSize=768m -XX:+HeapDumpOnOutOfMemoryError -Duser.language=en -Duser.country=US -Duser.timezone=UTC -Dfile.encoding=UTF-8'"
+ ]) {
+ sh ".release/scripts/prepare-release.sh -j -b ${env.GIT_BRANCH} -v ${env.DEVELOPMENT_VERSION} ${env.PROJECT} ${env.RELEASE_VERSION}"
}
}
}
}
}
}
- stage('Publish release') {
+ stage('Publish') {
steps {
script {
checkoutReleaseScripts()
@@ -197,23 +209,42 @@ pipeline {
configFile(fileId: 'release.config.ssh.knownhosts', targetLocation: "${env.HOME}/.ssh/known_hosts")
]) {
withCredentials([
- usernamePassword(credentialsId: 'ossrh.sonatype.org', passwordVariable: 'OSSRH_PASSWORD', usernameVariable: 'OSSRH_USER'),
- usernamePassword(credentialsId: 'gradle-plugin-portal-api-key', passwordVariable: 'PLUGIN_PORTAL_PASSWORD', usernameVariable: 'PLUGIN_PORTAL_USERNAME'),
+ usernamePassword(credentialsId: 'central.sonatype.com', passwordVariable: 'JRELEASER_MAVENCENTRAL_TOKEN', usernameVariable: 'JRELEASER_MAVENCENTRAL_USERNAME'),
+ // https://docs.gradle.org/current/userguide/publishing_gradle_plugins.html#account_setup
+ usernamePassword(credentialsId: 'gradle-plugin-portal-api-key', passwordVariable: 'GRADLE_PUBLISH_SECRET', usernameVariable: 'GRADLE_PUBLISH_KEY'),
+ gitUsernamePassword(credentialsId: 'username-and-token.Hibernate-CI.github.com', gitToolName: 'Default'),
file(credentialsId: 'release.gpg.private-key', variable: 'RELEASE_GPG_PRIVATE_KEY_PATH'),
- string(credentialsId: 'release.gpg.passphrase', variable: 'RELEASE_GPG_PASSPHRASE'),
- gitUsernamePassword(credentialsId: 'username-and-token.Hibernate-CI.github.com', gitToolName: 'Default')
+ string(credentialsId: 'release.gpg.passphrase', variable: 'JRELEASER_GPG_PASSPHRASE'),
+ string(credentialsId: 'Hibernate-CI.github.com', variable: 'JRELEASER_GITHUB_TOKEN')
]) {
- sshagent(['ed25519.Hibernate-CI.github.com', 'hibernate.filemgmt.jboss.org', 'hibernate-ci.frs.sourceforge.net']) {
+ sshagent(['ed25519.Hibernate-CI.github.com', 'jenkins.in.relation.to', 'hibernate-ci.frs.sourceforge.net']) {
// performs documentation upload and Sonatype release
// push to github
- sh ".release/scripts/publish.sh ${env.SCRIPT_OPTIONS} ${env.PROJECT} ${env.RELEASE_VERSION} ${env.DEVELOPMENT_VERSION} ${env.GIT_BRANCH}"
+ withEnv([
+ "DISABLE_REMOTE_GRADLE_CACHE=true"
+ ]) {
+ def ghReleaseNote = sh(script: 'realpath -e release_notes.md 2>/dev/null', returnStdout: true).trim()
+
+ sh ".release/scripts/publish.sh -j ${ghReleaseNote != '' ? '--notes=' + ghReleaseNote : ''} ${env.SCRIPT_OPTIONS} ${env.PROJECT} ${env.RELEASE_VERSION} ${env.DEVELOPMENT_VERSION} ${env.GIT_BRANCH} "
+ }
}
}
}
}
}
}
- stage('Website release') {
+ stage('Release on Jira') {
+ steps {
+ script {
+ checkoutReleaseScripts()
+
+ withCredentials([string(credentialsId: 'release-webhook.hibernate.atlassian.net', variable: 'JIRA_WEBHOOK_SECRET')]) {
+ sh ".release/scripts/jira-release.sh ${env.SCRIPT_OPTIONS} ${env.PROJECT} ${env.RELEASE_VERSION} ${env.DEVELOPMENT_VERSION}"
+ }
+ }
+ }
+ }
+ stage('Update website') {
steps {
script {
checkoutReleaseScripts()
@@ -225,14 +256,17 @@ pipeline {
withCredentials([
gitUsernamePassword(credentialsId: 'username-and-token.Hibernate-CI.github.com', gitToolName: 'Default')
]) {
- sshagent( ['ed25519.Hibernate-CI.github.com', 'hibernate.filemgmt.jboss.org', 'hibernate-ci.frs.sourceforge.net'] ) {
+ sshagent( ['ed25519.Hibernate-CI.github.com'] ) {
dir( '.release/hibernate.org' ) {
- checkout scmGit(
- branches: [[name: '*/production']],
- extensions: [],
- userRemoteConfigs: [[credentialsId: 'ed25519.Hibernate-CI.github.com', url: 'https://github.com/hibernate/hibernate.org.git']]
- )
- sh "../scripts/website-release.sh ${env.SCRIPT_OPTIONS} ${env.PROJECT} ${env.RELEASE_VERSION}"
+ // Lock to avoid rejected pushes when multiple releases try to clone-commit-push
+ lock('hibernate.org-git') {
+ checkout scmGit(
+ branches: [[name: '*/production']],
+ extensions: [],
+ userRemoteConfigs: [[credentialsId: 'ed25519.Hibernate-CI.github.com', url: 'https://github.com/hibernate/hibernate.org.git']]
+ )
+ sh "../scripts/website-release.sh ${env.SCRIPT_OPTIONS} ${env.PROJECT} ${env.RELEASE_VERSION}"
+ }
}
}
}
@@ -240,16 +274,6 @@ pipeline {
}
}
}
- stage('GitHub release') {
- steps {
- script {
- checkoutReleaseScripts()
- withCredentials([string(credentialsId: 'Hibernate-CI.github.com', variable: 'GITHUB_API_TOKEN')]) {
- sh ".release/scripts/github-release.sh ${env.SCRIPT_OPTIONS} ${env.PROJECT} ${env.RELEASE_VERSION}"
- }
- }
- }
- }
}
post {
always {
@@ -258,4 +282,4 @@ pipeline {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/ci/snapshot-publish.Jenkinsfile b/ci/snapshot-publish.Jenkinsfile
index 24cd67d92f1b..9d7a2fb9ff35 100644
--- a/ci/snapshot-publish.Jenkinsfile
+++ b/ci/snapshot-publish.Jenkinsfile
@@ -10,9 +10,17 @@ if (currentBuild.getBuildCauses().toString().contains('BranchIndexingCause')) {
return
}
+def checkoutReleaseScripts() {
+ dir('.release/scripts') {
+ checkout scmGit(branches: [[name: '*/main']], extensions: [],
+ userRemoteConfigs: [[credentialsId: 'ed25519.Hibernate-CI.github.com',
+ url: 'https://github.com/hibernate/hibernate-release-scripts.git']])
+ }
+}
+
pipeline {
agent {
- label 'Fedora'
+ label 'Release'
}
tools {
jdk 'OpenJDK 11 Latest'
@@ -30,23 +38,29 @@ pipeline {
}
stage('Publish') {
steps {
- withCredentials([
- usernamePassword(credentialsId: 'ossrh.sonatype.org', usernameVariable: 'hibernatePublishUsername', passwordVariable: 'hibernatePublishPassword'),
- usernamePassword(credentialsId: 'plugins.gradle.org', usernameVariable: 'hibernatePluginPortalUsername', passwordVariable: 'hibernatePluginPortalPassword'),
- string(credentialsId: 'ge.hibernate.org-access-key', variable: 'DEVELOCITY_ACCESS_KEY'),
- string(credentialsId: 'release.gpg.passphrase', variable: 'SIGNING_PASS'),
- file(credentialsId: 'release.gpg.private-key', variable: 'SIGNING_KEYRING')
- ]) {
- sh '''./gradlew clean publish \
- -PhibernatePublishUsername=$hibernatePublishUsername \
- -PhibernatePublishPassword=$hibernatePublishPassword \
- -Pgradle.publish.key=$hibernatePluginPortalUsername \
- -Pgradle.publish.secret=$hibernatePluginPortalPassword \
- --no-scan \
- -DsigningPassword=$SIGNING_PASS \
- -DsigningKeyFile=$SIGNING_KEYRING \
- '''
- }
+ script {
+ withCredentials([
+ // https://github.com/gradle-nexus/publish-plugin#publishing-to-maven-central-via-sonatype-ossrh
+ // https://docs.gradle.org/current/samples/sample_publishing_credentials.html#:~:text=via%20environment%20variables
+ usernamePassword(credentialsId: 'central.sonatype.com', passwordVariable: 'ORG_GRADLE_PROJECT_snapshotsPassword', usernameVariable: 'ORG_GRADLE_PROJECT_snapshotsUsername'),
+ string(credentialsId: 'Hibernate-CI.github.com', variable: 'JRELEASER_GITHUB_TOKEN'),
+ // https://docs.gradle.org/current/userguide/publishing_gradle_plugins.html#account_setup
+ usernamePassword(credentialsId: 'gradle-plugin-portal-api-key', passwordVariable: 'GRADLE_PUBLISH_SECRET', usernameVariable: 'GRADLE_PUBLISH_KEY'),
+ gitUsernamePassword(credentialsId: 'username-and-token.Hibernate-CI.github.com', gitToolName: 'Default')
+ ]) {
+ withEnv([
+ "DISABLE_REMOTE_GRADLE_CACHE=true"
+ ]) {
+ checkoutReleaseScripts()
+ def version = sh(
+ script: ".release/scripts/determine-current-version.sh orm",
+ returnStdout: true
+ ).trim()
+ echo "Current version: '${version}'"
+ sh "bash -xe .release/scripts/snapshot-deploy.sh orm ${version}"
+ }
+ }
+ }
}
}
}
@@ -57,4 +71,4 @@ pipeline {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/databases/mariadb/matrix.gradle b/databases/mariadb/matrix.gradle
index f66f5cacb300..b72fdee03569 100644
--- a/databases/mariadb/matrix.gradle
+++ b/databases/mariadb/matrix.gradle
@@ -4,4 +4,4 @@
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or .
*/
-jdbcDependency 'org.mariadb.jdbc:mariadb-java-client:3.4.0'
+jdbcDependency 'org.mariadb.jdbc:mariadb-java-client:3.5.1'
diff --git a/design/working/6.0-posts.adoc b/design/working/6.0-posts.adoc
index 690f6780b8a7..d0dec78fade3 100644
--- a/design/working/6.0-posts.adoc
+++ b/design/working/6.0-posts.adoc
@@ -2,7 +2,7 @@
Steve Ebersole
:awestruct-tags: ["Hibernate ORM"]
:awestruct-layout: blog-post
-:docs-url: https://docs.jboss.org/hibernate/orm/6.0
+:docs-url: https://docs.hibernate.org/orm/6.0
:javadocs-url: {docs-url}/javadocs
:migration-guide-url: {docs-url}/migration-guide/migration-guide.html
:user-guide-url: {docs-url}/userguide/html_single/Hibernate_User_Guide.html
diff --git a/dialects.adoc b/dialects.adoc
index ffbf79132468..159b1e2462d2 100644
--- a/dialects.adoc
+++ b/dialects.adoc
@@ -1,73 +1,6 @@
= Dialects
-A dialect is a class that provides information about the specifics of a database and translators for the SQL dialect of the database.
+The content of this file has moved to link:./documentation/src/main/asciidoc/dialect/index.adoc.
+Going forward, it will be published in rendered form at https://docs.hibernate.org/stable/orm/dialect/.
-== Supported dialects
-
-Hibernate supports a wide range of dialects out of the box. The following is list of officially supported databases:
-
-* Apache Derby
-* Cockroach
-* Google Spanner
-* H2
-* HSQLDB
-* IBM DB2 LUW
-* IBM DB2 iSeries
-* IBM DB2 z/OS
-* MariaDB
-* MySQL
-* Oracle
-* PostgreSQL
-* Postgres Plus
-* SAP HANA
-* SQL Server
-* Sybase ASE
-
-Usually, Hibernate supports at least the database version that is also still supported by the respective vendor.
-In many cases though, Hibernate supports even older versions of the databases,
-but the support for these versions is not guaranteed.
-
-Apart from the Hibernate team supported dialects, there are also community dialects.
-
-== Community dialects
-
-As of Hibernate 6.0, the Hibernate team decided to provide a clear way forward for community contributed dialects.
-The `hibernate-core` artifact had many legacy dialects before 6.0 that were only tested and maintained on a best effort basis.
-
-More and more database vendors requested to integrate a dialect for their database and even provided a PR with a dialect,
-but the Hibernate team didn't want to add new dialects for databases that might not have a wide adoption
-or any automated testing into the `hibernate-core` artifact. Even though the dialect was supposedly maintained by the vendor,
-the Hibernate team was burdened with reviewing questions, issues and PRs that relate to these dialects.
-
-To give database vendors and the community a clear way forward, the Hibernate team decided to introduce a new artifact,
-called `hibernate-community-dialects` which is the new home for dialects that are maintained by vendors or individuals.
-Starting with Hibernate 6.0 the `hibernate-core` artifact will only contain dialects that are supported and tested by the Hibernate team.
-All the legacy dialects are moved to the `hibernate-community-dialects` artifact to have a clear separation based on the quality of the dialect.
-
-Issues with dialects in the `hibernate-community-dialects` are usually not considered by the Hibernate team,
-as the community is responsible for providing fixes and improving the dialects for newer database versions or ORM capabilities.
-
-== Requirements for moving to hibernate-core
-
-If a database vendor wants their database dialect to be included in the `hibernate-core` artifact,
-several requirements have to be fulfilled:
-
-* The vendor must provide access to a dedicated database server that can be used for testing
-* The vendor must provide contact details to at least one employee who is mainly responsible for the maintenance of the dialect
-* The responsible employee of the vendor must actively monitor and react to failures of the testsuite against the respective database
-* The responsible employee of the vendor must ensure the testsuite is configured correctly in order for it to succeed on the respective database
-* If the responsible employee of the vendor leaves the company, the vendor must provide contact details to a new responsible employee
-
-In case the responsible employee is unreachable for a longer period or issues with the dialect are not attended to in a timely manner,
-the Hibernate team will move the dialect back to the `hibernate-community-dialects` artifact.
-
-The requirements for the database server are:
-
-* JDK 8 installed through e.g. `sudo yum install -y java-1.8.0-openjdk-devel`
-* JDK 11 installed through e.g. `sudo yum install -y java-11-openjdk-devel`
-* Git installed through e.g. `sudo yum install -y git`
-* Access to the database through non-confidential credentials
-* Access via SSH through confidential credentials
-
-Get in touch with the Hibernate team on https://hibernate.zulipchat.com/#narrow/stream/132096-hibernate-user[Zulip]
-if you want to request the move of your dialect to hibernate-core.
\ No newline at end of file
+If you ended up here following a link, please ask whoever published that link to update it.
\ No newline at end of file
diff --git a/docker_db.sh b/docker_db.sh
index 8cdbce7d9be3..459ccc605b06 100755
--- a/docker_db.sh
+++ b/docker_db.sh
@@ -1,18 +1,26 @@
#! /bin/bash
-if command -v podman > /dev/null; then
+if command -v docker > /dev/null; then
+ CONTAINER_CLI=$(command -v docker)
+ HEALTCHECK_PATH="{{.State.Health.Status}}"
+ PRIVILEGED_CLI=""
+ IS_PODMAN=false
+ if [[ "$(docker version | grep Podman)" == "" ]]; then
+ IS_DOCKER_RUNTIME=true
+ else
+ IS_DOCKER_RUNTIME=false
+ fi
+else
CONTAINER_CLI=$(command -v podman)
HEALTCHECK_PATH="{{.State.Healthcheck.Status}}"
+ IS_PODMAN=true
+ IS_DOCKER_RUNTIME=false
# Only use sudo for podman
if command -v sudo > /dev/null; then
PRIVILEGED_CLI="sudo"
else
PRIVILEGED_CLI=""
fi
-else
- CONTAINER_CLI=$(command -v docker)
- HEALTCHECK_PATH="{{.State.Health.Status}}"
- PRIVILEGED_CLI=""
fi
mysql() {
@@ -21,7 +29,7 @@ mysql() {
mysql_8_0() {
$CONTAINER_CLI rm -f mysql || true
- $CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mysql:8.0.31 --character-set-server=utf8mb4 --collation-server=utf8mb4_0900_as_cs --skip-character-set-client-handshake --log-bin-trust-function-creators=1 --lower_case_table_names=2
+ $CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d ${DB_IMAGE_MYSQL_8_0:-docker.io/mysql:8.0.31} --character-set-server=utf8mb4 --collation-server=utf8mb4_0900_as_cs --skip-character-set-client-handshake --log-bin-trust-function-creators=1 --lower_case_table_names=2
# Give the container some time to start
OUTPUT=
n=0
@@ -45,7 +53,7 @@ mysql_8_0() {
mysql_8_1() {
$CONTAINER_CLI rm -f mysql || true
- $CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mysql:8.1.0 --character-set-server=utf8mb4 --collation-server=utf8mb4_0900_as_cs --skip-character-set-client-handshake --log-bin-trust-function-creators=1 --lower_case_table_names=2
+ $CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d ${DB_IMAGE_MYSQL_8_1:-docker.io/mysql:8.1.0} --character-set-server=utf8mb4 --collation-server=utf8mb4_0900_as_cs --skip-character-set-client-handshake --log-bin-trust-function-creators=1 --lower_case_table_names=2
# Give the container some time to start
OUTPUT=
n=0
@@ -69,7 +77,7 @@ mysql_8_1() {
mysql_8_2() {
$CONTAINER_CLI rm -f mysql || true
- $CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mysql:8.2.0 --character-set-server=utf8mb4 --collation-server=utf8mb4_0900_as_cs --skip-character-set-client-handshake --log-bin-trust-function-creators=1 --lower_case_table_names=2
+ $CONTAINER_CLI run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d ${DB_IMAGE_MYSQL_8_2:-docker.io/mysql:8.2.0} --character-set-server=utf8mb4 --collation-server=utf8mb4_0900_as_cs --skip-character-set-client-handshake --log-bin-trust-function-creators=1 --lower_case_table_names=2
# Give the container some time to start
OUTPUT=
n=0
@@ -92,7 +100,7 @@ mysql_8_2() {
}
mariadb() {
- mariadb_11_4
+ mariadb_11_7
}
mariadb_wait_until_start()
@@ -116,31 +124,37 @@ mariadb_wait_until_start()
mariadb_10_4() {
$CONTAINER_CLI rm -f mariadb || true
- $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mariadb:10.4.33 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
+ $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d ${DB_IMAGE_MARIADB_10_4:-docker.io/mariadb:10.4.33} --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
mariadb_wait_until_start
}
mariadb_10_11() {
$CONTAINER_CLI rm -f mariadb || true
- $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mariadb:10.11.8 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
+ $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d ${DB_IMAGE_MARIADB_10_11:-docker.io/mariadb:10.11.8} --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
mariadb_wait_until_start
}
mariadb_11_1() {
$CONTAINER_CLI rm -f mariadb || true
- $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mariadb:11.1.2 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
+ $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d ${DB_IMAGE_MARIADB_11_1:-docker.io/mariadb:11.1.2} --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
mariadb_wait_until_start
}
mariadb_11_4() {
$CONTAINER_CLI rm -f mariadb || true
- $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d docker.io/mariadb:11.4.2 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
+ $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d ${DB_IMAGE_MARIADB_11_4:-docker.io/mariadb:11.4.2} --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
+ mariadb_wait_until_start
+}
+
+mariadb_11_7() {
+ $CONTAINER_CLI rm -f mariadb || true
+ $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d ${DB_IMAGE_MARIADB_11_7:-docker.io/mariadb:11.7-rc} --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
mariadb_wait_until_start
}
mariadb_verylatest() {
$CONTAINER_CLI rm -f mariadb || true
- $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d quay.io/mariadb-foundation/mariadb-devel:verylatest --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
+ $CONTAINER_CLI run --name mariadb -e MARIADB_USER=hibernate_orm_test -e MARIADB_PASSWORD=hibernate_orm_test -e MARIADB_DATABASE=hibernate_orm_test -e MARIADB_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d ${DB_IMAGE_MARIADB_VERYLATEST:-quay.io/mariadb-foundation/mariadb-devel:verylatest} --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --skip-character-set-client-handshake --lower_case_table_names=2
mariadb_wait_until_start
}
@@ -150,32 +164,32 @@ postgresql() {
postgresql_12() {
$CONTAINER_CLI rm -f postgres || true
- $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 -d docker.io/postgis/postgis:12-3.4
+ $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 -d ${DB_IMAGE_POSTGRESQL_12:-docker.io/postgis/postgis:12-3.4}
$CONTAINER_CLI exec postgres bash -c '/usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y && apt install -y postgresql-12-pgvector && psql -U hibernate_orm_test -d hibernate_orm_test -c "create extension vector;"'
}
postgresql_13() {
$CONTAINER_CLI rm -f postgres || true
- $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 -d docker.io/postgis/postgis:13-3.1
+ $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 -d ${DB_IMAGE_POSTGRESQL_13:-docker.io/postgis/postgis:13-3.1}
$CONTAINER_CLI exec postgres bash -c '/usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y && apt install -y postgresql-13-pgvector && psql -U hibernate_orm_test -d hibernate_orm_test -c "create extension vector;"'
}
postgresql_14() {
$CONTAINER_CLI rm -f postgres || true
- $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 -d docker.io/postgis/postgis:14-3.3
+ $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 -d ${DB_IMAGE_POSTGRESQL_14:-docker.io/postgis/postgis:14-3.3}
$CONTAINER_CLI exec postgres bash -c '/usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y && apt install -y postgresql-14-pgvector && psql -U hibernate_orm_test -d hibernate_orm_test -c "create extension vector;"'
}
postgresql_15() {
$CONTAINER_CLI rm -f postgres || true
- $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 --tmpfs /pgtmpfs:size=131072k -d docker.io/postgis/postgis:15-3.3 \
+ $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 --tmpfs /pgtmpfs:size=131072k -d ${DB_IMAGE_POSTGRESQL_15:-docker.io/postgis/postgis:15-3.3} \
-c fsync=off -c synchronous_commit=off -c full_page_writes=off -c shared_buffers=256MB -c maintenance_work_mem=256MB -c max_wal_size=1GB -c checkpoint_timeout=1d
$CONTAINER_CLI exec postgres bash -c '/usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y && apt install -y postgresql-15-pgvector && psql -U hibernate_orm_test -d hibernate_orm_test -c "create extension vector;"'
}
postgresql_16() {
$CONTAINER_CLI rm -f postgres || true
- $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 --tmpfs /pgtmpfs:size=131072k -d docker.io/postgis/postgis:16-3.4 \
+ $CONTAINER_CLI run --name postgres -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p5432:5432 --tmpfs /pgtmpfs:size=131072k -d ${DB_IMAGE_POSTGRESQL_16:-docker.io/postgis/postgis:16-3.4} \
-c fsync=off -c synchronous_commit=off -c full_page_writes=off -c shared_buffers=256MB -c maintenance_work_mem=256MB -c max_wal_size=1GB -c checkpoint_timeout=1d
$CONTAINER_CLI exec postgres bash -c '/usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y && apt install -y postgresql-16-pgvector && psql -U hibernate_orm_test -d hibernate_orm_test -c "create extension vector;"'
}
@@ -186,30 +200,42 @@ edb() {
edb_12() {
$CONTAINER_CLI rm -f edb || true
- # We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
- (cd edb; $CONTAINER_CLI build -t edb-test:12 -f edb12.Dockerfile .)
- $CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d edb-test:12
+ if [[ -z "${DB_IMAGE_EDB}" ]]; then
+ DB_IMAGE_EDB="edb-test:12"
+ # We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
+ (cd edb; $CONTAINER_CLI build -t edb-test:12 -f edb12.Dockerfile .)
+ fi
+ $CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d $DB_IMAGE_EDB
}
edb_14() {
$CONTAINER_CLI rm -f edb || true
- # We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
- (cd edb; $CONTAINER_CLI build -t edb-test:14 -f edb14.Dockerfile .)
- $CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d edb-test:14
+ if [[ -z "${DB_IMAGE_EDB}" ]]; then
+ DB_IMAGE_EDB="edb-test:14"
+ # We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
+ (cd edb; $CONTAINER_CLI build -t edb-test:14 -f edb14.Dockerfile .)
+ fi
+ $CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d $DB_IMAGE_EDB
}
edb_15() {
$CONTAINER_CLI rm -f edb || true
- # We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
- (cd edb; $CONTAINER_CLI build -t edb-test:15 -f edb15.Dockerfile .)
- $CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d edb-test:15
+ if [[ -z "${DB_IMAGE_EDB}" ]]; then
+ DB_IMAGE_EDB="edb-test:15"
+ # We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
+ (cd edb; $CONTAINER_CLI build -t edb-test:15 -f edb15.Dockerfile .)
+ fi
+ $CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d $DB_IMAGE_EDB
}
edb_16() {
$CONTAINER_CLI rm -f edb || true
- # We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
- (cd edb; $CONTAINER_CLI build -t edb-test:16 -f edb16.Dockerfile .)
- $CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d edb-test:16
+ if [[ -z "${DB_IMAGE_EDB}" ]]; then
+ DB_IMAGE_EDB="edb-test:16"
+ # We need to build a derived image because the existing image is mainly made for use by a kubernetes operator
+ (cd edb; $CONTAINER_CLI build -t edb-test:16 -f edb16.Dockerfile .)
+ fi
+ $CONTAINER_CLI run --name edb -e POSTGRES_USER=hibernate_orm_test -e POSTGRES_PASSWORD=hibernate_orm_test -e POSTGRES_DB=hibernate_orm_test -p 5444:5444 -d $DB_IMAGE_EDB
}
db2() {
@@ -218,7 +244,7 @@ db2() {
db2_11_5() {
$PRIVILEGED_CLI $CONTAINER_CLI rm -f db2 || true
- $PRIVILEGED_CLI $CONTAINER_CLI run --name db2 --privileged -e DB2INSTANCE=orm_test -e DB2INST1_PASSWORD=orm_test -e DBNAME=orm_test -e LICENSE=accept -e AUTOCONFIG=false -e ARCHIVE_LOGS=false -e TO_CREATE_SAMPLEDB=false -e REPODB=false -p 50000:50000 -d icr.io/db2_community/db2:11.5.9.0
+ $PRIVILEGED_CLI $CONTAINER_CLI run --name db2 --privileged -e DB2INSTANCE=orm_test -e DB2INST1_PASSWORD=orm_test -e DBNAME=orm_test -e LICENSE=accept -e AUTOCONFIG=false -e ARCHIVE_LOGS=false -e TO_CREATE_SAMPLEDB=false -e REPODB=false -p 50000:50000 -d ${DB_IMAGE_DB2_11_5:-icr.io/db2_community/db2:11.5.9.0}
# Give the container some time to start
OUTPUT=
while [[ $OUTPUT != *"INSTANCE"* ]]; do
@@ -232,7 +258,7 @@ db2_11_5() {
db2_10_5() {
$PRIVILEGED_CLI $CONTAINER_CLI rm -f db2 || true
# The sha represents the tag 10.5.0.5-3.10.0
- $PRIVILEGED_CLI $CONTAINER_CLI run --name db2 --privileged -e DB2INST1_PASSWORD=db2inst1-pwd -e LICENSE=accept -p 50000:50000 -d docker.io/ibmoms/db2express-c@sha256:a499afd9709a1f69fb41703e88def9869955234c3525547e2efc3418d1f4ca2b db2start
+ $PRIVILEGED_CLI $CONTAINER_CLI run --name db2 --privileged -e DB2INST1_PASSWORD=db2inst1-pwd -e LICENSE=accept -p 50000:50000 -d ${DB_IMAGE_DB2_10_5:-quay.io/hibernate/db2express-c@sha256:a499afd9709a1f69fb41703e88def9869955234c3525547e2efc3418d1f4ca2b} db2start
# Give the container some time to start
OUTPUT=
while [[ $OUTPUT != *"DB2START"* ]]; do
@@ -293,7 +319,7 @@ CREATE TRANSFORM FOR db2gse.ST_Geometry DB2_PROGRAM (
EOF
$PRIVILEGED_CLI $CONTAINER_CLI run --name db2spatial --privileged -e DB2INSTANCE=orm_test -e DB2INST1_PASSWORD=orm_test -e DBNAME=orm_test -e LICENSE=accept -e AUTOCONFIG=false -e ARCHIVE_LOGS=false -e TO_CREATE_SAMPLEDB=false -e REPODB=false \
-v ${temp_dir}:/conf \
- -p 50000:50000 -d docker.io/ibmcom/db2:11.5.5.0
+ -p 50000:50000 -d ${DB_IMAGE_DB2_SPATIAL:-docker.io/ibmcom/db2:11.5.5.0}
# Give the container some time to start
OUTPUT=
@@ -317,7 +343,7 @@ mssql() {
mssql_2017() {
$CONTAINER_CLI rm -f mssql || true
#This sha256 matches a specific tag of mcr.microsoft.com/mssql/server:2017-latest :
- $CONTAINER_CLI run --name mssql -d -p 1433:1433 -e "SA_PASSWORD=Hibernate_orm_test" -e ACCEPT_EULA=Y mcr.microsoft.com/mssql/server@sha256:7d194c54e34cb63bca083542369485c8f4141596805611e84d8c8bab2339eede
+ $CONTAINER_CLI run --name mssql -d -p 1433:1433 -e "SA_PASSWORD=Hibernate_orm_test" -e ACCEPT_EULA=Y ${DB_IMAGE_MSSQL_2017:-mcr.microsoft.com/mssql/server@sha256:7d194c54e34cb63bca083542369485c8f4141596805611e84d8c8bab2339eede}
sleep 5
n=0
until [ "$n" -ge 5 ]
@@ -339,7 +365,7 @@ mssql_2017() {
mssql_2022() {
$CONTAINER_CLI rm -f mssql || true
#This sha256 matches a specific tag of 2022-CU12-ubuntu-22.04 (https://mcr.microsoft.com/en-us/product/mssql/server/tags):
- $CONTAINER_CLI run --name mssql -d -p 1433:1433 -e "SA_PASSWORD=Hibernate_orm_test" -e ACCEPT_EULA=Y mcr.microsoft.com/mssql/server@sha256:b94071acd4612bfe60a73e265097c2b6388d14d9d493db8f37cf4479a4337480
+ $CONTAINER_CLI run --name mssql -d -p 1433:1433 -e "SA_PASSWORD=Hibernate_orm_test" -e ACCEPT_EULA=Y ${DB_IMAGE_MSSQL_2022:-mcr.microsoft.com/mssql/server@sha256:b94071acd4612bfe60a73e265097c2b6388d14d9d493db8f37cf4479a4337480}
sleep 5
n=0
until [ "$n" -ge 5 ]
@@ -361,7 +387,7 @@ mssql_2022() {
sybase() {
$CONTAINER_CLI rm -f sybase || true
# Yup, that sucks, but on ubuntu we need to use -T11889 as per: https://github.com/DataGrip/docker-env/issues/12
- $CONTAINER_CLI run -d -p 9000:5000 -p 9001:5001 --name sybase --entrypoint /bin/bash docker.io/nguoianphu/docker-sybase -c "source /opt/sybase/SYBASE.sh
+ $CONTAINER_CLI run -d -p 9000:5000 -p 9001:5001 --name sybase --entrypoint /bin/bash ${DB_IMAGE_SYBASE:-docker.io/nguoianphu/docker-sybase} -c "source /opt/sybase/SYBASE.sh
/opt/sybase/ASE-16_0/bin/dataserver \
-d/opt/sybase/data/master.dat \
-e/opt/sybase/ASE-16_0/install/MYSYBASE.log \
@@ -640,26 +666,28 @@ EOF\""
}
disable_userland_proxy() {
- if [[ "$HEALTCHECK_PATH" == "{{.State.Health.Status}}" ]]; then
- if [[ ! -f /etc/docker/daemon.json ]]; then
- echo "Didn't find /etc/docker/daemon.json but need to disable userland-proxy..."
- echo "Stopping docker..."
- sudo service docker stop
- echo "Creating /etc/docker/daemon.json..."
- sudo bash -c "echo '{\"userland-proxy\": false}' > /etc/docker/daemon.json"
- echo "Starting docker..."
- sudo service docker start
- echo "Docker successfully started with userland proxies disabled"
- elif ! grep -q userland-proxy /etc/docker/daemon.json; then
- echo "Userland proxy is still enabled in /etc/docker/daemon.json, but need to disable it..."
- export docker_daemon_json=$( /etc/docker/daemon.json'
- echo "Starting docker..."
- sudo service docker start
- echo "Docker successfully started with userland proxies disabled"
+ if [[ "$IS_DOCKER_RUNTIME" == "true" ]]; then
+ if [[ "$HEALTCHECK_PATH" == "{{.State.Health.Status}}" ]]; then
+ if [[ ! -f /etc/docker/daemon.json ]]; then
+ echo "Didn't find /etc/docker/daemon.json but need to disable userland-proxy..."
+ echo "Stopping docker..."
+ sudo service docker stop
+ echo "Creating /etc/docker/daemon.json..."
+ sudo bash -c "echo '{\"userland-proxy\": false}' > /etc/docker/daemon.json"
+ echo "Starting docker..."
+ sudo service docker start
+ echo "Docker successfully started with userland proxies disabled"
+ elif ! grep -q userland-proxy /etc/docker/daemon.json; then
+ echo "Userland proxy is still enabled in /etc/docker/daemon.json, but need to disable it..."
+ export docker_daemon_json=$( /etc/docker/daemon.json'
+ echo "Starting docker..."
+ sudo service docker start
+ echo "Docker successfully started with userland proxies disabled"
+ fi
fi
fi
}
@@ -742,7 +770,7 @@ oracle_21() {
--health-interval 5s \
--health-timeout 5s \
--health-retries 10 \
- docker.io/gvenzl/oracle-xe:21.3.0
+ ${DB_IMAGE_ORACLE_21:-docker.io/gvenzl/oracle-xe:21.3.0}
oracle_setup
}
@@ -756,7 +784,7 @@ oracle_23() {
--health-interval 5s \
--health-timeout 5s \
--health-retries 10 \
- docker.io/gvenzl/oracle-free:23
+ ${DB_IMAGE_ORACLE_23:-docker.io/gvenzl/oracle-free:23}
oracle_free_setup
}
@@ -773,7 +801,7 @@ hana() {
--sysctl kernel.shmmni=4096 \
--sysctl kernel.shmall=8388608 \
-v $temp_dir:/config:Z \
- docker.io/saplabs/hanaexpress:2.00.072.00.20231123.1 \
+ ${DB_IMAGE_HANA:-docker.io/saplabs/hanaexpress:2.00.072.00.20231123.1} \
--passwords-url file:///config/password.json \
--agree-to-sap-license
# Give the container some time to start
@@ -800,7 +828,7 @@ sinks:
redact: false
exit-on-error: true
"
- $CONTAINER_CLI run -d --name=cockroach -m 6g -p 26257:26257 -p 8080:8080 docker.io/cockroachdb/cockroach:v23.1.12 start-single-node \
+ $CONTAINER_CLI run -d --name=cockroach -m 6g -p 26257:26257 -p 8080:8080 ${DB_IMAGE_COCKROACHDB_23_1:-docker.io/cockroachdb/cockroach:v23.1.12} start-single-node \
--insecure --store=type=mem,size=0.25 --advertise-addr=localhost --log="$LOG_CONFIG"
OUTPUT=
while [[ $OUTPUT != *"CockroachDB node starting"* ]]; do
@@ -841,7 +869,7 @@ sinks:
redact: false
exit-on-error: true
"
- $CONTAINER_CLI run -d --name=cockroach -m 6g -p 26257:26257 -p 8080:8080 docker.io/cockroachdb/cockroach:v22.2.2 start-single-node \
+ $CONTAINER_CLI run -d --name=cockroach -m 6g -p 26257:26257 -p 8080:8080 ${DB_IMAGE_COCKROACHDB_22_2:-docker.io/cockroachdb/cockroach:v22.2.2} start-single-node \
--insecure --store=type=mem,size=0.25 --advertise-addr=localhost --log="$LOG_CONFIG"
OUTPUT=
while [[ $OUTPUT != *"CockroachDB node starting"* ]]; do
@@ -881,7 +909,7 @@ tidb_5_4() {
$CONTAINER_CLI rm -f tidb || true
$CONTAINER_CLI network rm -f tidb_network || true
$CONTAINER_CLI network create tidb_network
- $CONTAINER_CLI run --name tidb -p4000:4000 -d --network tidb_network docker.io/pingcap/tidb:v5.4.3
+ $CONTAINER_CLI run --name tidb -p4000:4000 -d --network tidb_network ${DB_IMAGE_TIDB_5_4:-docker.io/pingcap/tidb:v5.4.3}
# Give the container some time to start
OUTPUT=
n=0
@@ -908,8 +936,11 @@ informix() {
}
informix_14_10() {
+ temp_dir=$(mktemp -d)
+ echo "ALLOW_NEWLINE 1" >$temp_dir/onconfig.mod
+ chmod 777 -R $temp_dir
$PRIVILEGED_CLI $CONTAINER_CLI rm -f informix || true
- $PRIVILEGED_CLI $CONTAINER_CLI run --name informix --privileged -p 9088:9088 -e LICENSE=accept -e GL_USEGLU=1 -d icr.io/informix/informix-developer-database:14.10.FC9W1DE
+ $PRIVILEGED_CLI $CONTAINER_CLI run --name informix --privileged -p 9088:9088 -v $temp_dir:/opt/ibm/config -e LICENSE=accept -e GL_USEGLU=1 -d ${DB_IMAGE_INFORMIX_14_10:-icr.io/informix/informix-developer-database:14.10.FC9W1DE}
echo "Starting Informix. This can take a few minutes"
# Give the container some time to start
OUTPUT=
@@ -935,7 +966,7 @@ informix_14_10() {
informix_12_10() {
$PRIVILEGED_CLI $CONTAINER_CLI rm -f informix || true
- $PRIVILEGED_CLI $CONTAINER_CLI run --name informix --privileged -p 9088:9088 -e LICENSE=accept -e GL_USEGLU=1 -d ibmcom/informix-developer-database:12.10.FC12W1DE
+ $PRIVILEGED_CLI $CONTAINER_CLI run --name informix --privileged -p 9088:9088 -e LICENSE=accept -e GL_USEGLU=1 -d ${DB_IMAGE_INFORMIX_12_10:-ibmcom/informix-developer-database:12.10.FC12W1DE}
echo "Starting Informix. This can take a few minutes"
# Give the container some time to start
OUTPUT=
@@ -977,6 +1008,7 @@ if [ -z ${1} ]; then
echo -e "\thana"
echo -e "\tmariadb"
echo -e "\tmariadb_verylatest"
+ echo -e "\tmariadb_11_7"
echo -e "\tmariadb_11_4"
echo -e "\tmariadb_11_1"
echo -e "\tmariadb_10_11"
diff --git a/documentation/documentation.gradle b/documentation/documentation.gradle
index 02b5f8829852..fbd22a24e531 100644
--- a/documentation/documentation.gradle
+++ b/documentation/documentation.gradle
@@ -34,7 +34,6 @@ repositories {
apply from: rootProject.file( 'gradle/module.gradle' )
-apply from: rootProject.file( 'gradle/releasable.gradle' )
apply plugin: 'org.hibernate.orm.build.reports'
@@ -139,6 +138,7 @@ dependencies {
reportAggregation project(':hibernate-ant')
reportAggregation project(':hibernate-enhance-maven-plugin')
reportAggregation project(':hibernate-jpamodelgen')
+ reportAggregation project(':hibernate-community-dialects')
asciidoctorGems 'rubygems:rouge:4.1.1'
@@ -182,6 +182,7 @@ dependencies {
javadocClasspath jakartaLibs.jsonbApi
javadocClasspath libs.ant
javadocClasspath dbLibs.postgresql
+ javadocClasspath dbLibs.edb
javadocClasspath libs.jackson
javadocClasspath gradleApi()
javadocClasspath libs.jacksonXml
@@ -192,10 +193,6 @@ dependencies {
if ( project.ormVersion.isSnapshot ) {
// only run the ci build tasks for SNAPSHOT versions
tasks.register('ciBuild') { dependsOn clean }
- tasks.release.enabled false
-}
-else {
- tasks.release.dependsOn clean
}
@@ -261,7 +258,10 @@ asciidoctorj {
fullVersion: rootProject.ormVersion.fullName,
javaCompatibleVersions: jdks.versions.compatible.get(),
jakartaJpaVersion: rootProject.jakartaJpaVersion,
- jdbcVersion: jdks.versions.jdbc.get()
+ jdbcVersion: jdks.versions.jdbc.get(),
+ 'root-project-dir': rootProject.layout.projectDirectory.asFile.absolutePath,
+ 'doc-main-dir': project(':documentation').layout.projectDirectory.dir('src').dir("main").asFile.absolutePath,
+ 'shared-attributes-dir': project(':documentation').layout.projectDirectory.dir('src').dir("main").dir("asciidoc").dir("shared").asFile.absolutePath
options logDocuments: true
}
@@ -573,7 +573,7 @@ settingsDocumentation {
}
transaction {
explicitPosition = 6
- summary = "Proxool Connection Pool Settings"
+ summary = "Transaction Environment Settings"
description = "Settings which control how Hibernate interacts with and manages transactions"
settingsClassName "org.hibernate.cfg.TransactionSettings"
}
@@ -691,7 +691,7 @@ def renderUserGuideHtmlTask = tasks.register( 'renderUserGuideHtml', Asciidoctor
inputs.property "hibernate-version", project.ormVersion
inputs.file( generateSettingsDocTask.get().outputFile )
- dependsOn generateSettingsDocTask, generateDialectTableReport
+ dependsOn generateSettingsDocTask
sourceDir = file( 'src/main/asciidoc/userguide' )
sources {
@@ -806,6 +806,30 @@ def renderMigrationGuideTask = tasks.register( "renderMigrationGuide", Asciidoct
}
}
+// Dialect Guide ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+def renderDialectGuideTask = tasks.register( 'renderDialectGuide', AsciidoctorTask ) { task ->
+ group = "Documentation"
+ description = 'Renders the Dialect guide in HTML format using Asciidoctor.'
+ inputs.property "hibernate-version", project.ormVersion
+ dependsOn generateDialectTableReport, generateCommunityDialectTableReport
+
+ sourceDir = file( 'src/main/asciidoc/dialect' )
+ sources 'dialect.adoc'
+ outputDir = layout.buildDirectory.dir( 'asciidoc/dialect' )
+
+ attributes linkcss: true, stylesheet: "css/hibernate.css",
+ 'generated-report-dir': layout.buildDirectory.dir( 'orm/generated' ).get()
+
+ task.resources {
+ from( 'src/main/style/asciidoctor' ) {
+ include 'images/**'
+ }
+ from( 'src/main/style/asciidoctor' ) {
+ include 'css/**'
+ }
+ }
+}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// ORM Reports
@@ -835,34 +859,8 @@ def renderLoggingReportTask = tasks.register( 'renderLoggingReport', Asciidoctor
}
}
-def renderDialectReportTask = tasks.register( 'renderDialectReport', AsciidoctorTask ) { task ->
- task.group = "hibernate-reports"
- task.description = 'Renders the supported Dialect report in HTML format using Asciidoctor.'
- task.dependsOn "generateDialectReport"
- task.dependsOn "generateDialectTableReport"
-
- task.inputs.property "version", project.ormVersion
-
- task.sourceDir = layout.buildDirectory.dir( 'orm/generated/dialect' )
- task.sources 'dialect.adoc'
-
- task.outputDir = project.layout.buildDirectory.dir( 'asciidoc/dialect' )
-
- task.attributes linkcss: true,
- stylesheet: "css/hibernate.css"
-
- task.resources {
- from( 'src/main/style/asciidoctor' ) {
- include 'images/**'
- }
- from( 'src/main/style/asciidoctor' ) {
- include 'css/**'
- }
- }
-}
-
def generateReportsTask = tasks.named( "generateReports" ) {
- dependsOn renderLoggingReportTask, renderDialectReportTask
+ dependsOn renderLoggingReportTask
}
@@ -881,6 +879,7 @@ def buildDocsTask = tasks.register( 'buildDocs' ) { task ->
task.dependsOn renderRepositoriesTask
task.dependsOn renderIntegrationGuidesTask
task.dependsOn renderTopicalGuidesTask
+ task.dependsOn renderDialectGuideTask
task.dependsOn generateReportsTask
task.dependsOn renderMigrationGuideTask
}
@@ -900,4 +899,11 @@ tasks.withType(AsciidoctorTask).configureEach {
separateOutputDirs = false
backends 'html5'
}
+ // See https://docs.asciidoctor.org/gradle-plugin/latest/common-task-configuration/#choosing-an-execution-mode-for-asciidoctorj
+ executionMode = org.ysb33r.grolifant.api.core.jvm.ExecutionMode.JAVA_EXEC
+}
+
+tasks.withType(AsciidoctorPdfTask).configureEach {
+ // See https://docs.asciidoctor.org/gradle-plugin/latest/common-task-configuration/#choosing-an-execution-mode-for-asciidoctorj
+ executionMode = org.ysb33r.grolifant.api.core.jvm.ExecutionMode.JAVA_EXEC
}
diff --git a/documentation/src/main/asciidoc/dialect/dialect.adoc b/documentation/src/main/asciidoc/dialect/dialect.adoc
new file mode 100644
index 000000000000..51a34fd3d7f1
--- /dev/null
+++ b/documentation/src/main/asciidoc/dialect/dialect.adoc
@@ -0,0 +1,107 @@
+include::{shared-attributes-dir}/common-attributes.adoc[]
+include::{shared-attributes-dir}/url-attributes.adoc[]
+include::{shared-attributes-dir}/filesystem-attributes.adoc[]
+include::{shared-attributes-dir}/renderer-attributes.adoc[]
+
+= Dialects
+:toc2:
+:toclevels: 1
+:sectanchors:
+
+A dialect is a class that provides information about the specifics of a database and translators for the SQL dialect of the database.
+
+== Supported dialects
+
+Hibernate ORM supports a wide range of dialects out of the box.
+
+Usually, Hibernate supports at least the database version that is also still supported by the respective vendor.
+In many cases though, Hibernate supports even older versions of the databases,
+but the support for these versions is not guaranteed.
+
+Below is a list of supported dialects and the minimum required version of the database.
+
+include::{generated-report-dir}/dialect/dialect-table.adoc[]
+
+[[third-party-dialects]]
+== Third-party dialects
+
+Third-parties publish additional dialects for Hibernate ORM, providing their own support for more databases, or extended support beyond what is built into Hibernate ORM.
+
+These dialects are not directly supported by the Hibernate team:
+
+* The Hibernate ORM CI does not run any test against these dialects, the dialect's authors have their own test suite.
+* The Hibernate team will not address issues reported against these dialects, but the dialect's authors have their own issue tracker.
+
+[NOTE]
+====
+Third-party dialects may not be compatible with all versions of Hibernate ORM.
+
+Check the dialect's own documentation to know more about its compatibility constraints.
+The https://hibernate.org/orm/releases/#compatibility-matrix[compatibility matrix on the Hibernate website] may also be of help.
+====
+
+Below is a list of third-party dialects with links to relevant websites.
+
+[cols="a,a", options="header"]
+|===
+|Dialect |Website
+|MongoDB| https://github.com/mongodb/mongo-hibernate/[MongoDB extension for Hibernate ORM]
+|Google Spanner| https://github.com/GoogleCloudPlatform/google-cloud-spanner-hibernate[Google Cloud Spanner Dialect for Hibernate ORM]
+|===
+
+[[community-dialects]]
+== Community dialects
+
+Community dialects are not included in `org.hibernate.orm:hibernate-core` and require an additional dependency to `org.hibernate.orm:hibernate-community-dialects`.
+
+These dialects are not directly supported by the Hibernate team:
+
+* The Hibernate ORM CI does not run any test against these dialects.
+* The Hibernate team will not address issues reported against these dialects.
+
+Instead, the dialects are maintained on a best-effort basis by vendors or individuals.
+
+Below is a list of community dialects and the minimum required version of the database.
+
+include::{generated-report-dir}/dialect/dialect-table-community.adoc[]
+
+[NOTE]
+====
+Community dialects were introduced in Hibernate ORM 6.0.
+
+The `hibernate-core` artifact had many legacy dialects before 6.0 that were only tested and maintained on a best effort basis.
+More and more database vendors requested to integrate a dialect for their database and even provided a PR with a dialect,
+but the Hibernate team didn't want to add new dialects for databases that might not have a wide adoption
+or any automated testing into the `hibernate-core` artifact. Even though the dialect was supposedly maintained by the vendor,
+the Hibernate team was burdened with reviewing questions, issues and PRs that relate to these dialects.
+
+To give database vendors and the community a clear way forward, the Hibernate team decided to introduce a new artifact,
+called `hibernate-community-dialects` which is the new home for dialects that are maintained by vendors or individuals.
+
+Moving forward, the `hibernate-core` artifact will only contain dialects that are supported and tested by the Hibernate team.
+All the legacy dialects were moved to the `hibernate-community-dialects` artifact to have a clear separation based on the quality of the dialect.
+====
+
+== Requirements for moving from `hibernate-community-dialects` to `hibernate-core`
+
+If a database vendor wants their database dialect to be included in the `hibernate-core` artifact,
+several requirements have to be fulfilled:
+
+* The vendor must provide access to a dedicated database server that can be used for testing
+* The vendor must provide contact details to at least one employee who is mainly responsible for the maintenance of the dialect
+* The responsible employee of the vendor must actively monitor and react to failures of the testsuite against the respective database
+* The responsible employee of the vendor must ensure the testsuite is configured correctly in order for it to succeed on the respective database
+* If the responsible employee of the vendor leaves the company, the vendor must provide contact details to a new responsible employee
+
+In case the responsible employee is unreachable for a longer period or issues with the dialect are not attended to in a timely manner,
+the Hibernate team will move the dialect back to the `hibernate-community-dialects` artifact.
+
+The requirements for the database server are:
+
+* JDK 17 installed
+* Git installed
+* Access to the database through non-confidential credentials
+* Access via SSH through confidential credentials
+
+Get in touch with the Hibernate team on https://hibernate.zulipchat.com/#narrow/stream/132096-hibernate-user[Zulip]
+if you want to request the move of your dialect to hibernate-core.
diff --git a/documentation/src/main/asciidoc/introduction/Configuration.adoc b/documentation/src/main/asciidoc/introduction/Configuration.adoc
index 9293cf6e0ce1..1dbdc1007dfe 100644
--- a/documentation/src/main/asciidoc/introduction/Configuration.adoc
+++ b/documentation/src/main/asciidoc/introduction/Configuration.adoc
@@ -69,6 +69,8 @@ driver for your database.
| Oracle | `com.oracle.database.jdbc:ojdbc11:${version}`
| H2 | `com.h2database:h2:{version}`
| HSQLDB | `org.hsqldb:hsqldb:{version}`
+| MongoDB | The JDBC driver is bundled with the dialect mentioned in <>
+| Google Spanner | `com.google.cloud:google-cloud-spanner-jdbc:{version}`
|===
Where `{version}` is the latest version of the JDBC driver for your database.
@@ -118,6 +120,14 @@ and `com.github.ben-manes.caffeine:jcache`
or `org.eclipse:yasson`
| <> | `org.hibernate.orm:hibernate-spatial`
| <>, for auditing historical data | `org.hibernate.orm:hibernate-envers`
+| link:{doc-dialect-url}#community-dialects[Community dialects] | `org.hibernate.orm:hibernate-community-dialects`
+| link:{doc-dialect-url}#third-party-dialects[Third-party dialects]
+|
+https://github.com/mongodb/mongo-hibernate/[MongoDB]: `org.mongodb:mongodb-hibernate:{version}`
+
+https://github.com/GoogleCloudPlatform/google-cloud-spanner-hibernate[Google Spanner]: `com.google.cloud:google-cloud-spanner-hibernate-dialect:{version}`
+
+Where `{version}` is the version of the third-party dialect compatible with the version of Hibernate ORM you are using. See the dialect's own documentation for more information. The https://hibernate.org/orm/releases/#compatibility-matrix[compatibility matrix on the Hibernate website] may also be of help.
|===
You might also add the Hibernate {enhancer}[bytecode enhancer] to your
@@ -291,7 +301,7 @@ The properties you really do need to get started are these three:
====
In Hibernate 6, you don't need to specify `hibernate.dialect`.
The correct Hibernate SQL `Dialect` will be determined for you automatically.
-The only reason to specify this property is if you're using a custom user-written `Dialect` class.
+The only reason to specify this property is if you're using a custom user-written or link:{doc-dialect-url}#third-party-dialects[third-party] `Dialect` class.
Similarly, neither `hibernate.connection.driver_class` nor `jakarta.persistence.jdbc.driver` is needed when working with one of the supported databases.
====
diff --git a/documentation/src/main/asciidoc/introduction/Hibernate_Introduction.adoc b/documentation/src/main/asciidoc/introduction/Hibernate_Introduction.adoc
index cf93d8d8b9a5..6526db7ec935 100644
--- a/documentation/src/main/asciidoc/introduction/Hibernate_Introduction.adoc
+++ b/documentation/src/main/asciidoc/introduction/Hibernate_Introduction.adoc
@@ -1,5 +1,3 @@
-:shared-attributes-dir: ../shared/
-
include::{shared-attributes-dir}/common-attributes.adoc[]
include::{shared-attributes-dir}/url-attributes.adoc[]
include::{shared-attributes-dir}/filesystem-attributes.adoc[]
@@ -7,7 +5,7 @@ include::{shared-attributes-dir}/renderer-attributes.adoc[]
= An Introduction to Hibernate 6
-:title-logo-image: image:../../style/asciidoctor/images/org/hibernate/logo.png[]
+:title-logo-image: image:{doc-main-dir}/style/asciidoctor/images/org/hibernate/logo.png[]
:toc:
:toclevels: 3
diff --git a/documentation/src/main/asciidoc/introduction/Introduction.adoc b/documentation/src/main/asciidoc/introduction/Introduction.adoc
index d7d33e09787c..861aa5023c74 100644
--- a/documentation/src/main/asciidoc/introduction/Introduction.adoc
+++ b/documentation/src/main/asciidoc/introduction/Introduction.adoc
@@ -533,8 +533,8 @@ Whatever the case, the code which orchestrates a unit of work usually just calls
[source,java]
----
@GET
-@Path("books/{titlePattern}")
-public List findBooks(String titlePattern) {
+@Path("books/{titlePattern}/{page:\\d+}")
+public List findBooks(String titlePattern, int page) {
var books = sessionFactory.fromTransaction(session ->
Queries.findBooksByTitleWithPagination(session, titlePattern,
Page.page(RESULTS_PER_PAGE, page));
@@ -565,8 +565,8 @@ We can call it just like we called our handwritten version:
[source,java]
----
@GET
-@Path("books/{titlePattern}")
-public List findBooks(String titlePattern) {
+@Path("books/{titlePattern}/{page:\\d+}")
+public List findBooks(String titlePattern, int page) {
var books = sessionFactory.fromTransaction(session ->
Queries_.findBooksByTitleWithPagination(session, titlePattern,
Page.page(RESULTS_PER_PAGE, page));
diff --git a/documentation/src/main/asciidoc/querylanguage/Expressions.adoc b/documentation/src/main/asciidoc/querylanguage/Expressions.adoc
index 3778f5940a18..fdf8bd204eb3 100644
--- a/documentation/src/main/asciidoc/querylanguage/Expressions.adoc
+++ b/documentation/src/main/asciidoc/querylanguage/Expressions.adoc
@@ -720,7 +720,7 @@ Recognized Field types are listed below.
| `offset minute` | `Integer` | 0-59 | Minutes of offset | â
|===
-For a full list of field types, see the Javadoc for https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/query/TemporalUnit.html[`TemporalUnit`].
+For a full list of field types, see the Javadoc for https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/query/TemporalUnit.html[`TemporalUnit`].
[source, hql]
----
@@ -768,7 +768,7 @@ The pattern must be written in a subset of the pattern language defined by Java'
select format(local datetime as 'yyyy-MM-dd HH:mm:ss')
----
-For a full list of `format()` pattern elements, see the Javadoc for https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/dialect/Dialect.html#appendDatetimeFormat[`Dialect.appendDatetimeFormat`].
+For a full list of `format()` pattern elements, see the Javadoc for https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/dialect/Dialect.html#appendDatetimeFormat[`Dialect.appendDatetimeFormat`].
[[function-trunc-datetime]]
[discrete]
diff --git a/documentation/src/main/asciidoc/querylanguage/Hibernate_Query_Language.adoc b/documentation/src/main/asciidoc/querylanguage/Hibernate_Query_Language.adoc
index 8d0b63d13fba..c9c425b144e1 100644
--- a/documentation/src/main/asciidoc/querylanguage/Hibernate_Query_Language.adoc
+++ b/documentation/src/main/asciidoc/querylanguage/Hibernate_Query_Language.adoc
@@ -1,5 +1,3 @@
-:shared-attributes-dir: ../shared/
-
include::{shared-attributes-dir}/common-attributes.adoc[]
include::{shared-attributes-dir}/url-attributes.adoc[]
include::{shared-attributes-dir}/filesystem-attributes.adoc[]
@@ -10,7 +8,7 @@ include::{shared-attributes-dir}/renderer-attributes.adoc[]
:extrasdir: extras
= A Guide to Hibernate Query Language
-:title-logo-image: image:../../style/asciidoctor/images/org/hibernate/logo.png[]
+:title-logo-image: image:{doc-main-dir}/style/asciidoctor/images/org/hibernate/logo.png[]
:toc:
:toclevels: 3
diff --git a/documentation/src/main/asciidoc/querylanguage/Preface.adoc b/documentation/src/main/asciidoc/querylanguage/Preface.adoc
index 4976e23c64d7..d78667efc973 100644
--- a/documentation/src/main/asciidoc/querylanguage/Preface.adoc
+++ b/documentation/src/main/asciidoc/querylanguage/Preface.adoc
@@ -1,5 +1,3 @@
-:shared-attributes-dir: ../shared/
-
include::{shared-attributes-dir}/url-attributes.adoc[]
[[preface]]
diff --git a/documentation/src/main/asciidoc/quickstart/guides/index.adoc b/documentation/src/main/asciidoc/quickstart/guides/index.adoc
index 753eb12fc9f3..5074ed47aa7c 100644
--- a/documentation/src/main/asciidoc/quickstart/guides/index.adoc
+++ b/documentation/src/main/asciidoc/quickstart/guides/index.adoc
@@ -1,5 +1,3 @@
-:shared-attributes-dir: ../../shared/
-
include::{shared-attributes-dir}/common-attributes.adoc[]
include::{shared-attributes-dir}/url-attributes.adoc[]
include::{shared-attributes-dir}/filesystem-attributes.adoc[]
diff --git a/documentation/src/main/asciidoc/quickstart/guides/obtaining.adoc b/documentation/src/main/asciidoc/quickstart/guides/obtaining.adoc
index 81a9a507dcdc..351ec0d7b84c 100644
--- a/documentation/src/main/asciidoc/quickstart/guides/obtaining.adoc
+++ b/documentation/src/main/asciidoc/quickstart/guides/obtaining.adoc
@@ -58,7 +58,7 @@ transitive dependencies based on the features being used or not.
|hibernate-jcache| Integration with https://jcp.org/en/jsr/detail?id=107$$[JCache], allowing any compliant implementation as a second-level cache provider
|hibernate-graalvm| Experimental extension to make it easier to compile applications as a https://www.graalvm.org/[GraalVM] native image
|hibernate-micrometer| Integration with https://micrometer.io[Micrometer] metrics
-|hibernate-community-dialects| Additional community-supported SQL dialects
+|hibernate-community-dialects| Additional link:{doc-dialect-url}#community-dialects[community-supported SQL dialects]
|===
[cols="40m,~"]
@@ -67,6 +67,25 @@ transitive dependencies based on the features being used or not.
|hibernate-testing| A series of JUnit extensions for testing Hibernate ORM functionality
|===
+[cols="40m,~"]
+.Third-party modules
+|===
+// Yes, this is a full row containing asciidoc containing an admonition. I don't know have a better idea to add an admonition between a table's title and its content.
+2+a|
+[NOTE]
+====
+Third-party modules, and in particular link:{doc-dialect-url}#third-party-dialects[third-party dialects], are tested by their own authors,
+and may not be compatible with all versions of Hibernate ORM.
+
+1. Check the module's own documentation to know more about its compatibility constraints.
+The https://hibernate.org/orm/releases/#compatibility-matrix[compatibility matrix on the Hibernate website] may also be of help.
+2. Submit any question or bug reports about these dialects to the dialect's authors: the Hibernate team cannot help.
+
+====
+|`org.mongodb:mongodb-hibernate:{version}`| https://github.com/mongodb/mongo-hibernate/[MongoDB Extension for Hibernate ORM]
+|`com.google.cloud:google-cloud-spanner-hibernate-dialect:{version}`| https://github.com/GoogleCloudPlatform/google-cloud-spanner-hibernate[Google Cloud Spanner Dialect for Hibernate ORM]
+|===
+
[[platform]]
=== Platform / BOM
diff --git a/documentation/src/main/asciidoc/quickstart/guides/preface.adoc b/documentation/src/main/asciidoc/quickstart/guides/preface.adoc
index b44b21b3538c..773af9518e89 100644
--- a/documentation/src/main/asciidoc/quickstart/guides/preface.adoc
+++ b/documentation/src/main/asciidoc/quickstart/guides/preface.adoc
@@ -1,5 +1,3 @@
-:shared-attributes-dir: ../../shared/
-
include::{shared-attributes-dir}/url-attributes.adoc[]
include::{shared-attributes-dir}/filesystem-attributes.adoc[]
diff --git a/documentation/src/main/asciidoc/repositories/Hibernate_Data_Repositories.adoc b/documentation/src/main/asciidoc/repositories/Hibernate_Data_Repositories.adoc
index 9c5bd400580a..1a8f22b32b52 100644
--- a/documentation/src/main/asciidoc/repositories/Hibernate_Data_Repositories.adoc
+++ b/documentation/src/main/asciidoc/repositories/Hibernate_Data_Repositories.adoc
@@ -1,12 +1,10 @@
-:shared-attributes-dir: ../shared/
-
include::{shared-attributes-dir}/common-attributes.adoc[]
include::{shared-attributes-dir}/url-attributes.adoc[]
include::{shared-attributes-dir}/filesystem-attributes.adoc[]
include::{shared-attributes-dir}/renderer-attributes.adoc[]
= Introducing Hibernate Data Repositories
-:title-logo-image: image:../../style/asciidoctor/images/org/hibernate/logo.png[]
+:title-logo-image: image:{doc-main-dir}/style/asciidoctor/images/org/hibernate/logo.png[]
:toc:
:toclevels: 3
diff --git a/documentation/src/main/asciidoc/repositories/Preface.adoc b/documentation/src/main/asciidoc/repositories/Preface.adoc
index 81e76a50a0be..75b5f0a364e9 100644
--- a/documentation/src/main/asciidoc/repositories/Preface.adoc
+++ b/documentation/src/main/asciidoc/repositories/Preface.adoc
@@ -1,5 +1,3 @@
-:shared-attributes-dir: ../shared/
-
include::{shared-attributes-dir}/url-attributes.adoc[]
[[preface]]
@@ -16,4 +14,4 @@ On the other hand, the programming model for interacting with the database is qu
Therefore, this document will show you a different way to use Hibernate.
The coverage of Jakarta Data is intentionally inexhaustive.
-If exhaustion is sought, this document should be read in conjunction with the specification, which we've worked hard to keep readable.
\ No newline at end of file
+If exhaustion is sought, this document should be read in conjunction with the specification, which we've worked hard to keep readable.
diff --git a/documentation/src/main/asciidoc/shared/filesystem-attributes.adoc b/documentation/src/main/asciidoc/shared/filesystem-attributes.adoc
index 4b0c59edba7a..e29f07e36fcd 100644
--- a/documentation/src/main/asciidoc/shared/filesystem-attributes.adoc
+++ b/documentation/src/main/asciidoc/shared/filesystem-attributes.adoc
@@ -2,14 +2,12 @@
// Centralized definition of Asciidoc attributes for local filesystem paths
// ****************************************************************************
-:doc-main-dir: ../..
:doc-main-asciidoc-dir: {doc-main-dir}/asciidoc
:doc-main-style-dir: {doc-main-dir}/style
:pdf-theme: {doc-main-style-dir}/pdf/theme.yml
:pdf-fontsdir: {doc-main-style-dir}/pdf/fonts
-//:title-logo-image: {doc-main-style-dir}/asciidoctor/images/org/hibernate/logo.png[]
+//:title-logo-image: image:{doc-main-dir}/style/asciidoctor/images/org/hibernate/logo.png[]
-:root-project-dir: ../../../../..
:core-project-dir: {root-project-dir}/hibernate-core
:documentation-project-dir: {root-project-dir}/documentation
:testing-project-dir: {root-project-dir}/hibernate-testing
diff --git a/documentation/src/main/asciidoc/shared/url-attributes.adoc b/documentation/src/main/asciidoc/shared/url-attributes.adoc
index 12c29a944d4f..4497f0a7f76e 100644
--- a/documentation/src/main/asciidoc/shared/url-attributes.adoc
+++ b/documentation/src/main/asciidoc/shared/url-attributes.adoc
@@ -4,20 +4,20 @@
include::./common-attributes.adoc[]
-:doc-base-url: https://docs.jboss.org/hibernate/orm
+:doc-base-url: https://docs.hibernate.org/orm
:doc-version-base-url: {doc-base-url}/{majorMinorVersion}
-:doc-migration-guide-url: {doc-version-base-url}/migration-guide/migration-guide.html
+:doc-migration-guide-url: {doc-version-base-url}/migration-guide/
:doc-quick-start-url: {doc-version-base-url}/quickstart/html_single/
:doc-query-language-url: {doc-version-base-url}/querylanguage/html_single/Hibernate_Query_Language.html
:doc-introduction-url: {doc-version-base-url}/introduction/html_single/Hibernate_Introduction.html
:doc-user-guide-url: {doc-version-base-url}/userguide/html_single/Hibernate_User_Guide.html
:doc-javadoc-url: {doc-version-base-url}/javadocs/
:doc-topical-url: {doc-version-base-url}/topical/html_single/
-:doc-registries-url: {doc-topical-url}/registries/ServiceRegistries.html
-:doc-logging-url: {doc-topical-url}/logging/Logging.html
+:doc-registries-url: {doc-topical-url}/registries/
+:doc-logging-url: {doc-topical-url}/logging/
+:doc-dialect-url: {doc-version-base-url}/dialect/
:report-deprecation-url: {doc-version-base-url}/deprecated/deprecating.txt
-:report-dialect-url: {doc-version-base-url}/dialect/dialect.html
:report-incubating-url: {doc-version-base-url}/incubating/incubating.txt
:report-internals-url: {doc-version-base-url}/internals/internal.txt
-:report-logging-url: {doc-version-base-url}/logging/logging.html
+:report-logging-url: {doc-version-base-url}/logging/
diff --git a/documentation/src/main/asciidoc/topical/index.adoc b/documentation/src/main/asciidoc/topical/index.adoc
index 628562548d79..d4e575987cee 100644
--- a/documentation/src/main/asciidoc/topical/index.adoc
+++ b/documentation/src/main/asciidoc/topical/index.adoc
@@ -1,5 +1,3 @@
-:shared-attributes-dir: ../shared/
-
include::{shared-attributes-dir}/common-attributes.adoc[]
include::{shared-attributes-dir}/url-attributes.adoc[]
include::{shared-attributes-dir}/filesystem-attributes.adoc[]
diff --git a/documentation/src/main/asciidoc/topical/logging/Logging.adoc b/documentation/src/main/asciidoc/topical/logging/Logging.adoc
index 4422ff99f054..5db89c05dec1 100644
--- a/documentation/src/main/asciidoc/topical/logging/Logging.adoc
+++ b/documentation/src/main/asciidoc/topical/logging/Logging.adoc
@@ -1,5 +1,3 @@
-:shared-attributes-dir: ../../shared/
-
include::{shared-attributes-dir}/common-attributes.adoc[]
include::{shared-attributes-dir}/url-attributes.adoc[]
include::{shared-attributes-dir}/filesystem-attributes.adoc[]
diff --git a/documentation/src/main/asciidoc/userguide/Hibernate_User_Guide-docinfo.html b/documentation/src/main/asciidoc/userguide/Hibernate_User_Guide-docinfo.html
index 0fd842c784cc..8a452d99e232 100644
--- a/documentation/src/main/asciidoc/userguide/Hibernate_User_Guide-docinfo.html
+++ b/documentation/src/main/asciidoc/userguide/Hibernate_User_Guide-docinfo.html
@@ -1,5 +1,5 @@
-
-
+
+
diff --git a/documentation/src/main/asciidoc/userguide/Hibernate_User_Guide.adoc b/documentation/src/main/asciidoc/userguide/Hibernate_User_Guide.adoc
index 01e738fc3f8a..9bd5673a0f40 100644
--- a/documentation/src/main/asciidoc/userguide/Hibernate_User_Guide.adoc
+++ b/documentation/src/main/asciidoc/userguide/Hibernate_User_Guide.adoc
@@ -1,5 +1,3 @@
-:shared-attributes-dir: ../shared/
-
include::{shared-attributes-dir}/common-attributes.adoc[]
include::{shared-attributes-dir}/url-attributes.adoc[]
include::{shared-attributes-dir}/filesystem-attributes.adoc[]
diff --git a/documentation/src/main/asciidoc/userguide/Preface.adoc b/documentation/src/main/asciidoc/userguide/Preface.adoc
index 5c2de61d009f..613a012b1803 100644
--- a/documentation/src/main/asciidoc/userguide/Preface.adoc
+++ b/documentation/src/main/asciidoc/userguide/Preface.adoc
@@ -1,4 +1,3 @@
-:shared-attributes-dir: ../shared/
include::{shared-attributes-dir}/url-attributes.adoc[]
[[preface]]
diff --git a/documentation/src/main/asciidoc/userguide/appendices/LegacyBasicTypeResolution.adoc b/documentation/src/main/asciidoc/userguide/appendices/LegacyBasicTypeResolution.adoc
index 273be37a5aa8..bc3b941b57d7 100644
--- a/documentation/src/main/asciidoc/userguide/appendices/LegacyBasicTypeResolution.adoc
+++ b/documentation/src/main/asciidoc/userguide/appendices/LegacyBasicTypeResolution.adoc
@@ -122,7 +122,7 @@ But first, let's explore how implicit resolution works and how applications can
====
A thorough discussion of `BasicTypeRegistry` and all the different ways to contribute types is beyond the scope of this documentation.
-Please see the http://docs.jboss.org/hibernate/orm/{majorMinorVersion}/integrationguide/html_single/Hibernate_Integration_Guide.html[Integration Guide] for complete details.
+Please see the http://docs.hibernate.org/orm/{majorMinorVersion}/integrationguide/html_single/Hibernate_Integration_Guide.html[Integration Guide] for complete details.
====
As an example, take a String attribute such as we saw before with Product#sku.
@@ -319,4 +319,4 @@ When running the previous test case against the `BitSetUserType` entity mapping,
----
include::{originalextrasdir}/basic/basic-custom-type-BitSetUserType-persistence-sql-example.sql[]
----
-====
\ No newline at end of file
+====
diff --git a/documentation/src/main/asciidoc/userguide/chapters/bootstrap/Bootstrap.adoc b/documentation/src/main/asciidoc/userguide/chapters/bootstrap/Bootstrap.adoc
index c2db7fed499e..2c36fe596b6f 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/bootstrap/Bootstrap.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/bootstrap/Bootstrap.adoc
@@ -73,7 +73,7 @@ include::{example-dir-boot}/BootstrapTest.java[tags=bootstrap-bootstrap-native-r
====
A `StandardServiceRegistry` is also highly configurable via the StandardServiceRegistryBuilder API.
-See the `StandardServiceRegistryBuilder` https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/registry/StandardServiceRegistryBuilder.html[Javadocs] for more details.
+See the `StandardServiceRegistryBuilder` https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/registry/StandardServiceRegistryBuilder.html[Javadocs] for more details.
Some specific methods of interest:
@@ -107,7 +107,7 @@ The second step in native bootstrapping is the building of an `org.hibernate.boo
The first thing we obviously need to build a parsed representation is the source information to be parsed (annotated classes, `hbm.xml` files, `orm.xml` files).
This is the purpose of `org.hibernate.boot.MetadataSources`.
-`MetadataSources` has many other methods as well. Explore its API and https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/MetadataSources.html[Javadocs] for more information.
+`MetadataSources` has many other methods as well. Explore its API and https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/MetadataSources.html[Javadocs] for more information.
Also, all methods on `MetadataSources` offer fluent-style call chaining::
[[bootstrap-native-metadata-source-example]]
@@ -120,7 +120,7 @@ include::{example-dir-boot}/BootstrapTest.java[tags=bootstrap-native-metadata-so
====
Once we have the sources of mapping information defined, we need to build the `Metadata` object.
-If you are ok with the default behavior in building the Metadata then you can simply call the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/MetadataSources.html#buildMetadata--[`buildMetadata`] method of the `MetadataSources`.
+If you are ok with the default behavior in building the Metadata then you can simply call the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/MetadataSources.html#buildMetadata--[`buildMetadata`] method of the `MetadataSources`.
[NOTE]
====
@@ -132,7 +132,7 @@ From there, `MetadataBuilder`, `Metadata`, `SessionFactoryBuilder`, and `Session
However, if you wish to adjust the process of building `Metadata` from `MetadataSources`,
you will need to use the `MetadataBuilder` as obtained via `MetadataSources#getMetadataBuilder`.
`MetadataBuilder` allows a lot of control over the `Metadata` building process.
-See its https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/MetadataBuilder.html[Javadocs] for full details.
+See its https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/MetadataBuilder.html[Javadocs] for full details.
[[bootstrap-native-metadata-builder-example]]
.Building Metadata via `MetadataBuilder`
@@ -147,9 +147,9 @@ include::{example-dir-boot}/BootstrapTest.java[tags=bootstrap-native-metadata-bu
==== Building the SessionFactory
The final step in native bootstrapping is to build the `SessionFactory` itself.
-Much like discussed above, if you are ok with the default behavior of building a `SessionFactory` from a `Metadata` reference, you can simply call the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/Metadata.html#buildSessionFactory--[`buildSessionFactory`] method on the `Metadata` object.
+Much like discussed above, if you are ok with the default behavior of building a `SessionFactory` from a `Metadata` reference, you can simply call the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/Metadata.html#buildSessionFactory--[`buildSessionFactory`] method on the `Metadata` object.
-However, if you would like to adjust that building process, you will need to use `SessionFactoryBuilder` as obtained via `Metadata#getSessionFactoryBuilder`. Again, see its https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/Metadata.html#getSessionFactoryBuilder--[Javadocs] for more details.
+However, if you would like to adjust that building process, you will need to use `SessionFactoryBuilder` as obtained via `Metadata#getSessionFactoryBuilder`. Again, see its https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/Metadata.html#getSessionFactoryBuilder--[Javadocs] for more details.
[[bootstrap-native-SessionFactory-example]]
.Native Bootstrapping - Putting it all together
@@ -242,7 +242,7 @@ include::{example-dir-boot}/BootstrapTest.java[tags=bootstrap-jpa-compliant-Enti
====
If you don't want to provide a `persistence.xml` configuration file, Jakarta Persistence allows you to provide all the configuration options in a
{jpaJavadocUrlPrefix}spi/PersistenceUnitInfo.html[`PersistenceUnitInfo`] implementation and call
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/jpa/HibernatePersistenceProvider.html#createContainerEntityManagerFactory-jakarta.persistence.spi.PersistenceUnitInfo-java.util.Map-[`HibernatePersistenceProvider#createContainerEntityManagerFactory()`].
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/jpa/HibernatePersistenceProvider.html#createContainerEntityManagerFactory-jakarta.persistence.spi.PersistenceUnitInfo-java.util.Map-[`HibernatePersistenceProvider#createContainerEntityManagerFactory()`].
====
To inject the default Persistence Context, you can use the {jpaJavadocUrlPrefix}PersistenceContext.html[`@PersistenceContext`] annotation.
@@ -320,7 +320,7 @@ As previously seen, the Hibernate native bootstrap mechanism allows you to custo
When using Hibernate as a Jakarta Persistence provider, the `EntityManagerFactory` is backed by a `SessionFactory`. For this reason, you might still want to use the `Metadata` object to pass various settings which cannot be supplied via the standard Hibernate <>.
For this reason, you can use the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/spi/MetadataBuilderContributor.html[`MetadataBuilderContributor`] class as you can see in the following examples.
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/spi/MetadataBuilderContributor.html[`MetadataBuilderContributor`] class as you can see in the following examples.
[[bootstrap-jpa-compliant-MetadataBuilderContributor-example]]
.Implementing a `MetadataBuilderContributor`
@@ -335,6 +335,6 @@ org.hibernate.orm.test.bootstrap.spi.metadatabuildercontributor
The above `MetadataBuilderContributor` is used to register a `SqlFuction` which is not defined by the currently running Hibernate `Dialect`, but which we need to reference in our JPQL queries.
By having access to the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/MetadataBuilder.html[`MetadataBuilder`] class that's used by the underlying `SessionFactory`, the Jakarta Persistence bootstrap becomes just as flexible as the Hibernate native bootstrap mechanism.
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/boot/MetadataBuilder.html[`MetadataBuilder`] class that's used by the underlying `SessionFactory`, the Jakarta Persistence bootstrap becomes just as flexible as the Hibernate native bootstrap mechanism.
You can then pass the custom `MetadataBuilderContributor` via the `hibernate.metadata_builder_contributor` configuration property as explained in the <>.
diff --git a/documentation/src/main/asciidoc/userguide/chapters/caching/Caching.adoc b/documentation/src/main/asciidoc/userguide/chapters/caching/Caching.adoc
index b7ca0866857b..23a4b6bb3c38 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/caching/Caching.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/caching/Caching.adoc
@@ -39,7 +39,7 @@ Besides provider specific configuration, there are a number of configurations op
`hibernate.cache.use_second_level_cache`::
Enable or disable second level caching overall. By default, if the currently configured
- https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/cache/spi/RegionFactory.html[`RegionFactory`] is not the `NoCachingRegionFactory`, then the second-level cache is going to be enabled. Otherwise, the second-level cache is disabled.
+ https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/cache/spi/RegionFactory.html[`RegionFactory`] is not the `NoCachingRegionFactory`, then the second-level cache is going to be enabled. Otherwise, the second-level cache is disabled.
`hibernate.cache.use_query_cache`::
Enable or disable second level caching of query results. The default is false.
`hibernate.cache.query_cache_factory`::
@@ -120,7 +120,7 @@ transactional::
====
Rather than using a global setting, it is recommended to define the cache concurrency strategy on a per entity basis.
-Use the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/Cache.html[`@org.hibernate.annotations.Cache`] annotation for this purpose.
+Use the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/Cache.html[`@org.hibernate.annotations.Cache`] annotation for this purpose.
====
The `@Cache` annotation define three attributes:
@@ -377,7 +377,7 @@ include::{example-dir-caching}/SecondLevelCacheTest.java[tags=caching-query-regi
[NOTE]
====
-When using {jpaJavadocUrlPrefix}CacheStoreMode.html#REFRESH[`CacheStoreMode.REFRESH`] or https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/CacheMode.html#REFRESH[`CacheMode.REFRESH`] in conjunction with the region you have defined for the given query,
+When using {jpaJavadocUrlPrefix}CacheStoreMode.html#REFRESH[`CacheStoreMode.REFRESH`] or https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/CacheMode.html#REFRESH[`CacheMode.REFRESH`] in conjunction with the region you have defined for the given query,
Hibernate will selectively force the results cached in that particular region to be refreshed.
This behavior is particularly useful in cases when the underlying data may have been updated via a separate process
@@ -419,7 +419,7 @@ by placing the annotation on the entity class or the persistent collection attri
[[caching-management]]
=== Managing the cached data
-Traditionally, Hibernate defined the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/CacheMode.html[`CacheMode`] enumeration to describe
+Traditionally, Hibernate defined the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/CacheMode.html[`CacheMode`] enumeration to describe
the ways of interactions with the cached data.
Jakarta Persistence split cache modes by storage ({jpaJavadocUrlPrefix}CacheStoreMode.html[`CacheStoreMode`])
and retrieval ({jpaJavadocUrlPrefix}CacheRetrieveMode.html[`CacheRetrieveMode`]).
@@ -495,7 +495,7 @@ include::{example-dir-caching}/SecondLevelCacheTest.java[tags=caching-management
====
Hibernate is much more flexible in this regard as it offers fine-grained control over what needs to be evicted.
-The https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/Cache.html[`org.hibernate.Cache`] interface defines various evicting strategies:
+The https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/Cache.html[`org.hibernate.Cache`] interface defines various evicting strategies:
- entities (by their class or region)
- entities stored using the natural-id (by their class or region)
@@ -518,7 +518,7 @@ If you enable the `hibernate.generate_statistics` configuration property,
Hibernate will expose a number of metrics via `SessionFactory.getStatistics()`.
Hibernate can even be configured to expose these statistics via JMX.
-This way, you can get access to the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/stat/Statistics.html[`Statistics`] class which comprises all sort of
+This way, you can get access to the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/stat/Statistics.html[`Statistics`] class which comprises all sort of
second-level cache metrics.
[[caching-statistics-example]]
diff --git a/documentation/src/main/asciidoc/userguide/chapters/compatibility/Compatibility.adoc b/documentation/src/main/asciidoc/userguide/chapters/compatibility/Compatibility.adoc
index 49bae6674020..f554dbe452a1 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/compatibility/Compatibility.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/compatibility/Compatibility.adoc
@@ -70,7 +70,7 @@ Maven::
[[compatibility-database]]
=== Database
-Hibernate {fullVersion} is compatible with the following database versions,
-provided you use the corresponding <>:
+Hibernate {fullVersion}'s compatibility with a given database and version
+depends on the dialect being used.
-include::{generated-report-dir}/dialect/dialect-table.adoc[]
+Refer to the link:{doc-dialect-url}[Dialects] guide for details about both dialects and supported databases.
diff --git a/documentation/src/main/asciidoc/userguide/chapters/domain/associations.adoc b/documentation/src/main/asciidoc/userguide/chapters/domain/associations.adoc
index 991646853b23..e762e4d6ec67 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/domain/associations.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/domain/associations.adoc
@@ -784,7 +784,7 @@ include::{extrasdir}/associations-many-to-any-query-example.sql[]
[[associations-JoinFormula]]
==== `@JoinFormula` mapping
-The https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/JoinFormula.html[`@JoinFormula`] annotation is used to customize the join between a child Foreign Key and a parent row Primary Key.
+The https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/JoinFormula.html[`@JoinFormula`] annotation is used to customize the join between a child Foreign Key and a parent row Primary Key.
[[associations-JoinFormula-example]]
.`@JoinFormula` mapping usage
@@ -834,7 +834,7 @@ Therefore, the `@JoinFormula` annotation is used to define a custom join associa
[[associations-JoinColumnOrFormula]]
==== `@JoinColumnOrFormula` mapping
-The https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/JoinColumnOrFormula.html[`@JoinColumnOrFormula`] annotation is used to customize the join between a child Foreign Key and a parent row Primary Key when we need to take into consideration a column value as well as a `@JoinFormula`.
+The https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/JoinColumnOrFormula.html[`@JoinColumnOrFormula`] annotation is used to customize the join between a child Foreign Key and a parent row Primary Key when we need to take into consideration a column value as well as a `@JoinFormula`.
[[associations-JoinColumnOrFormula-example]]
.`@JoinColumnOrFormula` mapping usage
diff --git a/documentation/src/main/asciidoc/userguide/chapters/domain/basic_types.adoc b/documentation/src/main/asciidoc/userguide/chapters/domain/basic_types.adoc
index 63307989a298..6870d77982c0 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/domain/basic_types.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/domain/basic_types.adoc
@@ -2023,7 +2023,7 @@ include::{example-dir-resources}/mapping/converter/hbm/MoneyConverterHbmTest.hbm
A basic type that's converted by a Jakarta Persistence `AttributeConverter` is immutable if the underlying Java type is immutable
and is mutable if the associated attribute type is mutable as well.
-Therefore, mutability is given by the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/type/descriptor/java/JavaType.html#getMutabilityPlan--[`JavaType#getMutabilityPlan`]
+Therefore, mutability is given by the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/type/descriptor/java/JavaType.html#getMutabilityPlan--[`JavaType#getMutabilityPlan`]
of the associated entity attribute type.
This can be adjusted by using `@Immutable` or `@Mutability` on any of:
@@ -2603,8 +2603,8 @@ include::{example-dir-generated}/temporals/GeneratedUuidTests.java[tags=mapping-
----
====
-See https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/ValueGenerationType.html[`@ValueGenerationType`]
-and https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/tuple/AnnotationValueGeneration.html[`AnnotationValueGeneration`]
+See https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/ValueGenerationType.html[`@ValueGenerationType`]
+and https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/generator/tuple/AnnotationValueGeneration.html[`AnnotationBasedGenerator`]
for details of each contract
diff --git a/documentation/src/main/asciidoc/userguide/chapters/domain/collections.adoc b/documentation/src/main/asciidoc/userguide/chapters/domain/collections.adoc
index 94a48f910566..2b8fe67d4497 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/domain/collections.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/domain/collections.adoc
@@ -5,7 +5,7 @@
:core-project-dir: {root-project-dir}/hibernate-core
:core-test-base: {core-project-dir}/src/test/java
:example-dir-collection: {core-test-base}/org/hibernate/orm/test/mapping/collections
-:docs-base: https://docs.jboss.org/hibernate/orm/{majorMinorVersion}
+:docs-base: https://docs.hibernate.org/orm/{majorMinorVersion}
:javadoc-base: {docs-base}/javadoc
:java-javadoc-base: https://docs.oracle.com/en/java/javase/11/docs/api/java.base
:extrasdir: extras/collections
@@ -715,7 +715,7 @@ When fetching the collection, Hibernate will use the fetched ordered columns to
[[collections-customizing-ordered-list-ordinal]]
===== Customizing ordered list ordinal
-You can customize the ordinal of the underlying ordered list by using the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/ListIndexBase.html[`@ListIndexBase`] annotation.
+You can customize the ordinal of the underlying ordered list by using the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/ListIndexBase.html[`@ListIndexBase`] annotation.
[[collections-customizing-ordered-list-ordinal-mapping-example]]
.`@ListIndexBase` mapping example
@@ -748,7 +748,7 @@ include::{extrasdir}/collections-customizing-ordered-list-ordinal-persist-exampl
While the Jakarta Persistence
{jpaJavadocUrlPrefix}OrderBy.html[`@OrderBy`] annotation allows you to specify the entity attributes used for sorting
when fetching the current annotated collection, the Hibernate specific
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/OrderBy.html[`@OrderBy`] annotation is used to specify a *SQL* clause instead.
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/OrderBy.html[`@OrderBy`] annotation is used to specify a *SQL* clause instead.
In the following example, the `@OrderBy` annotation uses the `CHAR_LENGTH` SQL function to order the `Article` entities
by the number of characters of the `name` attribute.
@@ -929,7 +929,7 @@ include::{extrasdir}/collections-map-value-type-entity-key-add-example.sql[]
===== Maps with a custom key type
Hibernate defines the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/MapKeyType.html[`@MapKeyType`] annotation
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/MapKeyType.html[`@MapKeyType`] annotation
which you can use to customize the `Map` key type.
Considering you have the following tables in your database:
diff --git a/documentation/src/main/asciidoc/userguide/chapters/domain/embeddables.adoc b/documentation/src/main/asciidoc/userguide/chapters/domain/embeddables.adoc
index dd8af3f81cd7..568092759e66 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/domain/embeddables.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/domain/embeddables.adoc
@@ -161,7 +161,7 @@ Embeddable types that are used as collection entries, map keys or entity type id
[[embeddable-Target]]
==== `@Target` mapping
-The https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/Target.html[`@Target`] annotation is used to specify the implementation class of a given association that is mapped via an interface.
+The https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/Target.html[`@Target`] annotation is used to specify the implementation class of a given association that is mapped via an interface.
The
{jpaJavadocUrlPrefix}ManyToOne.html[`@ManyToOne`],
{jpaJavadocUrlPrefix}OneToOne.html[`@OneToOne`],
@@ -617,4 +617,4 @@ Again, the name and the nullability of the `aggregate` column can be refined thr
Mapping <> inside an `@Embeddable` value is supported in most cases. There are a couple exceptions:
* If the values of an <> is of embeddable type, that embeddable cannot contain nested collections;
-* Explicitly selecting an embeddable that contains collections in a query is currently not supported (we wouldn't be able to correctly initialize the collection since its owning entity instance would be missing from the Persistence Context).
\ No newline at end of file
+* Explicitly selecting an embeddable that contains collections in a query is currently not supported (we wouldn't be able to correctly initialize the collection since its owning entity instance would be missing from the Persistence Context).
diff --git a/documentation/src/main/asciidoc/userguide/chapters/domain/entity.adoc b/documentation/src/main/asciidoc/userguide/chapters/domain/entity.adoc
index 0f543a7f6340..3850c55bdfb4 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/domain/entity.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/domain/entity.adoc
@@ -405,7 +405,7 @@ For details on mapping the identifier, see the < cls, String entityName, Number revision, boolean includeDeletions)`]
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/envers/query/AuditQueryCreator.html#forEntitiesAtRevision-java.lang.Class-java.lang.String-java.lang.Number-boolean-[`forEntitiesAtRevision(Class cls, String entityName, Number revision, boolean includeDeletions)`]
method to get the deleted entity revision so that, instead of a `NoResultException`,
all attributes, except for the entity identifier, are going to be `null`.
@@ -190,7 +190,7 @@ include::{example-dir-envers}/DefaultAuditTest.java[tags=envers-audited-rev4-exa
----
====
-See the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/envers/AuditReader.html[Javadocs] for details on other functionality offered.
+See the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/envers/AuditReader.html[Javadocs] for details on other functionality offered.
[[envers-configuration]]
=== Configuration Properties
@@ -499,7 +499,7 @@ Simply add the custom revision entity as you do your normal entities and Envers
NOTE: It is an error for there to be multiple entities marked as `@org.hibernate.envers.RevisionEntity`.
. Second, you need to tell Envers how to create instances of your revision entity which is handled by the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/envers/RevisionListener.html#newRevision-java.lang.Object-[`newRevision( Object revisionEntity )`]
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/envers/RevisionListener.html#newRevision-java.lang.Object-[`newRevision( Object revisionEntity )`]
method of the `org.hibernate.envers.RevisionListener` interface.
+
You tell Envers to use your custom `org.hibernate.envers.RevisionListener` implementation by specifying it on the `@org.hibernate.envers.RevisionEntity` annotation, using the value attribute.
@@ -578,7 +578,7 @@ As demonstrated by the example above, the username is properly set and propagate
**This strategy is deprecated since version 5.2. The alternative is to use dependency injection offered as of version 5.3.**
An alternative method to using the `org.hibernate.envers.RevisionListener` is to instead call the
-[line-through]#https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/envers/AuditReader.html#getCurrentRevision-java.lang.Class-boolean-[`getCurrentRevision( Class revisionEntityClass, boolean persist )`]#
+[line-through]#https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/envers/AuditReader.html#getCurrentRevision-java.lang.Class-boolean-[`getCurrentRevision( Class revisionEntityClass, boolean persist )`]#
method of the `org.hibernate.envers.AuditReader` interface to obtain the current revision,
and fill it with desired information.
diff --git a/documentation/src/main/asciidoc/userguide/chapters/fetching/Fetching.adoc b/documentation/src/main/asciidoc/userguide/chapters/fetching/Fetching.adoc
index a70d9733cca8..6a0cbf5af2e8 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/fetching/Fetching.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/fetching/Fetching.adoc
@@ -411,7 +411,7 @@ However, if the `Employee` data is not resolved in cache, the `Employee` and `Pr
[[fetching-batch]]
=== Batch fetching
-Hibernate offers the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/BatchSize.html[`@BatchSize`] annotation,
+Hibernate offers the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/BatchSize.html[`@BatchSize`] annotation,
which can be used when fetching uninitialized entity proxies.
Considering the following entity mapping:
@@ -601,8 +601,8 @@ This time, there was no secondary query because the child collection was loaded
[[fetching-LazyCollection]]
=== `@LazyCollection`
-The https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/LazyCollection.html[`@LazyCollection`] annotation is used to specify the lazy fetching behavior of a given collection.
-The possible values are given by the `https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/LazyCollectionOption.html[LazyCollectionOption]` enumeration:
+The https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/LazyCollection.html[`@LazyCollection`] annotation is used to specify the lazy fetching behavior of a given collection.
+The possible values are given by the `https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/LazyCollectionOption.html[LazyCollectionOption]` enumeration:
`TRUE`:: Load it when the state is requested.
`FALSE`:: Eagerly load it.
diff --git a/documentation/src/main/asciidoc/userguide/chapters/flushing/Flushing.adoc b/documentation/src/main/asciidoc/userguide/chapters/flushing/Flushing.adoc
index 63da093aa80e..0a66900e8d14 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/flushing/Flushing.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/flushing/Flushing.adoc
@@ -18,7 +18,7 @@ Because DML statements are grouped together, Hibernate can apply batching transp
See the <> for more information.
====
-The flushing strategy is given by the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#getFlushMode--[`flushMode`] of the current running Hibernate `Session`.
+The flushing strategy is given by the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#getFlushMode--[`flushMode`] of the current running Hibernate `Session`.
Although Jakarta Persistence defines only two flushing strategies ({jpaJavadocUrlPrefix}FlushModeType.html#AUTO[`AUTO`] and {jpaJavadocUrlPrefix}FlushModeType.html#COMMIT[`COMMIT`]),
Hibernate has a much broader spectrum of flush types:
diff --git a/documentation/src/main/asciidoc/userguide/chapters/jdbc/Database_Access.adoc b/documentation/src/main/asciidoc/userguide/chapters/jdbc/Database_Access.adoc
index b8fafb17eb7e..b0f37b1ce271 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/jdbc/Database_Access.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/jdbc/Database_Access.adoc
@@ -8,7 +8,7 @@ As an ORM tool, probably the single most important thing you need to tell Hibern
This is ultimately the function of the `org.hibernate.engine.jdbc.connections.spi.ConnectionProvider` interface.
Hibernate provides some out of the box implementations of this interface.
`ConnectionProvider` is also an extension point so you can also use custom implementations from third parties or written yourself.
-The `ConnectionProvider` to use is defined by the `hibernate.connection.provider_class` setting. See the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/cfg/AvailableSettings.html#CONNECTION_PROVIDER[`org.hibernate.cfg.AvailableSettings#CONNECTION_PROVIDER`]
+The `ConnectionProvider` to use is defined by the `hibernate.connection.provider_class` setting. See the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/cfg/AvailableSettings.html#CONNECTION_PROVIDER[`org.hibernate.cfg.AvailableSettings#CONNECTION_PROVIDER`]
Generally speaking, applications should not have to configure a `ConnectionProvider` explicitly if using one of the Hibernate-provided implementations.
Hibernate will internally determine which `ConnectionProvider` to use based on the following algorithm:
@@ -230,7 +230,7 @@ Again, this is only supported for JDBC standard isolation levels, not for isolat
=== Connection handling
The connection handling mode is defined by the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/resource/jdbc/spi/PhysicalConnectionHandlingMode.html[`PhysicalConnectionHandlingMode`] enumeration which provides the following strategies:
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/resource/jdbc/spi/PhysicalConnectionHandlingMode.html[`PhysicalConnectionHandlingMode`] enumeration which provides the following strategies:
`IMMEDIATE_ACQUISITION_AND_HOLD`::
The `Connection` will be acquired as soon as the `Session` is opened and held until the `Session` is closed.
@@ -276,8 +276,8 @@ and no connection leak false positive is being reported, then you should conside
==== User-provided connections
If the current `Session` was created using the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/SessionBuilder.html[`SessionBuilder`] and a JDBC `Connection` was provided via the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/SessionBuilder.html#connection-java.sql.Connection-[`SessionBuilder#connection`] method, then the user-provided `Connection` is going to be used, and
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/SessionBuilder.html[`SessionBuilder`] and a JDBC `Connection` was provided via the
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/SessionBuilder.html#connection-java.sql.Connection-[`SessionBuilder#connection`] method, then the user-provided `Connection` is going to be used, and
the connection handling mode will be `IMMEDIATE_ACQUISITION_AND_HOLD`.
Therefore for user-provided connection, the connection is acquired right away and held until the current `Session` is closed, without being influenced by the Jakarta Persistence or Hibernate transaction context.
@@ -292,5 +292,7 @@ Hibernate abstracts over variations between dialects of SQL via the class `org.h
- There's a subclass of `Dialect` for each supported relational database in the package `org.hibernate.dialect`.
- Additional community-supported ``Dialect``s are available in the separate module `hibernate-community-dialects`.
-In Hibernate 6, it's no longer necessary to explicitly specify a dialect using the configuration property `hibernate.dialect`, and so setting that property is now discouraged.
-(An exception is the case of custom user-written ``Dialect``s.)
\ No newline at end of file
+Starting with Hibernate 6, it's no longer necessary to explicitly specify a dialect using the configuration property `hibernate.dialect`, and so setting that property is now discouraged.
+An exception is the case of custom or link:{doc-dialect-url}#third-party-dialects[third-party] ``Dialect``s.
+
+NOTE: For information about available dialects and compatible database versions, see the link:{doc-dialect-url}[dialect guide].
diff --git a/documentation/src/main/asciidoc/userguide/chapters/locking/Locking.adoc b/documentation/src/main/asciidoc/userguide/chapters/locking/Locking.adoc
index e1a0aa2fc7c7..997c04eb9dc1 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/locking/Locking.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/locking/Locking.adoc
@@ -146,7 +146,7 @@ include::{extrasdir}/locking-optimistic-version-timestamp-source-persist-example
By default, every entity attribute modification is going to trigger a version incrementation.
If there is an entity property which should not bump up the entity version,
-then you need to annotate it with the Hibernate https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/OptimisticLock.html[`@OptimisticLock`] annotation,
+then you need to annotate it with the Hibernate https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/OptimisticLock.html[`@OptimisticLock`] annotation,
as illustrated in the following example.
[[locking-optimistic-exclude-attribute-mapping-example]]
@@ -197,9 +197,9 @@ This is also useful for use with modeling legacy schemas.
The idea is that you can get Hibernate to perform "version checks" using either all of the entity's attributes or just the attributes that have changed.
This is achieved through the use of the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/OptimisticLocking.html[`@OptimisticLocking`]
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/OptimisticLocking.html[`@OptimisticLocking`]
annotation which defines a single attribute of type
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/OptimisticLockType.html[`org.hibernate.annotations.OptimisticLockType`].
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/OptimisticLockType.html[`org.hibernate.annotations.OptimisticLockType`].
There are 4 available OptimisticLockTypes:
@@ -291,7 +291,7 @@ is that it allows you to minimize the risk of `OptimisticEntityLockException` ac
When using `OptimisticLockType.DIRTY`, you should also use `@DynamicUpdate` because the `UPDATE` statement must take into consideration all the dirty entity property values,
and also the `@SelectBeforeUpdate` annotation so that detached entities are properly handled by the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#update-java.lang.Object-[`Session#update(entity)`] operation.
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#update-java.lang.Object-[`Session#update(entity)`] operation.
====
[[locking-pessimistic]]
diff --git a/documentation/src/main/asciidoc/userguide/chapters/multitenancy/MultiTenancy.adoc b/documentation/src/main/asciidoc/userguide/chapters/multitenancy/MultiTenancy.adoc
index b30f83d1d12e..e9608b5dc643 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/multitenancy/MultiTenancy.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/multitenancy/MultiTenancy.adoc
@@ -121,7 +121,7 @@ It could name a `MultiTenantConnectionProvider` instance, a `MultiTenantConnecti
* Passed directly to the `org.hibernate.boot.registry.StandardServiceRegistryBuilder`.
* If none of the above options match, but the settings do specify a `hibernate.connection.datasource` value,
Hibernate will assume it should use the specific `DataSourceBasedMultiTenantConnectionProviderImpl` implementation which works on a number of pretty reasonable assumptions when running inside of an app server and using one `javax.sql.DataSource` per tenant.
-See its https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/engine/jdbc/connections/spi/DataSourceBasedMultiTenantConnectionProviderImpl.html[Javadocs] for more details.
+See its https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/engine/jdbc/connections/spi/DataSourceBasedMultiTenantConnectionProviderImpl.html[Javadocs] for more details.
The following example portrays a `MultiTenantConnectionProvider` implementation that handles multiple ``ConnectionProvider``s.
diff --git a/documentation/src/main/asciidoc/userguide/chapters/pc/PersistenceContext.adoc b/documentation/src/main/asciidoc/userguide/chapters/pc/PersistenceContext.adoc
index d6e3d1c97bcf..a4f02d6f9846 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/pc/PersistenceContext.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/pc/PersistenceContext.adoc
@@ -188,10 +188,10 @@ but also inefficient.
While the Jakarta Persistence standard does not support retrieving multiple entities at once, other than running a JPQL or Criteria API query,
Hibernate offers this functionality via the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#byMultipleIds-java.lang.Class-[`byMultipleIds` method] of the Hibernate `Session`.
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#byMultipleIds-java.lang.Class-[`byMultipleIds` method] of the Hibernate `Session`.
The `byMultipleIds` method returns a
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/MultiIdentifierLoadAccess.html[`MultiIdentifierLoadAccess`]
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/MultiIdentifierLoadAccess.html[`MultiIdentifierLoadAccess`]
which you can use to customize the multi-load request.
The `MultiIdentifierLoadAccess` interface provides several methods which you can use to
@@ -221,10 +221,10 @@ When enabled, the result set will contain deleted entities.
When disabled (which is the default behavior), deleted entities are not included in the returning `List`.
`with(LockOptions lockOptions)`::
This setting allows you to pass a given
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/LockOptions.html[`LockOptions`] mode to the multi-load query.
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/LockOptions.html[`LockOptions`] mode to the multi-load query.
`with(CacheMode cacheMode)`::
This setting allows you to pass a given
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/CacheMode.html[`CacheMode`]
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/CacheMode.html[`CacheMode`]
strategy so that we can load entities from the second-level cache, therefore skipping the cached entities from being fetched via the multi-load query.
`withBatchSize(int batchSize)`::
This setting allows you to specify a batch size for loading the entities (e.g. how many at a time).
@@ -256,11 +256,11 @@ include::{extrasdir}/pc-by-multiple-ids-example.sql[]
====
Notice that only one SQL SELECT statement was executed since the second call uses the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/MultiIdentifierLoadAccess.html#enableSessionCheck-boolean-[`enableSessionCheck`] method of the `MultiIdentifierLoadAccess`
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/MultiIdentifierLoadAccess.html#enableSessionCheck-boolean-[`enableSessionCheck`] method of the `MultiIdentifierLoadAccess`
to instruct Hibernate to skip entities that are already loaded in the current Persistence Context.
If the entities are not available in the current Persistence Context but they could be loaded from the second-level cache, you can use the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/MultiIdentifierLoadAccess.html#with-org.hibernate.CacheMode-[`with(CacheMode)`] method of the `MultiIdentifierLoadAccess` object.
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/MultiIdentifierLoadAccess.html#with-org.hibernate.CacheMode-[`with(CacheMode)`] method of the `MultiIdentifierLoadAccess` object.
[[tag::pc-by-multiple-ids-second-level-cache-example]]
.Loading multiple entities from the second-level cache
@@ -280,7 +280,7 @@ shared cache.
Afterward, when executing the second `byMultipleIds` call for the same entities in a new Hibernate `Session`,
we set the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/CacheMode.html#NORMAL[`CacheMode.NORMAL`] second-level cache mode so that entities are going to be returned from the second-level cache.
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/CacheMode.html#NORMAL[`CacheMode.NORMAL`] second-level cache mode so that entities are going to be returned from the second-level cache.
The `getSecondLevelCacheHitCount` statistics method returns 3 this time, since the 3 entities were loaded from the second-level cache, and, as illustrated by `sqlStatementInterceptor.getSqlQueries()`, no multi-load SELECT statement was executed this time.
@@ -648,7 +648,7 @@ For this reason, the second-level collection cache is limited to storing whole c
When using the `@Filter` annotation and working with entities that are mapped onto multiple database tables,
you will need to use the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/SqlFragmentAlias.html[`@SqlFragmentAlias`] annotation
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/SqlFragmentAlias.html[`@SqlFragmentAlias`] annotation
if the `@Filter` defines a condition that uses predicates across multiple tables.
[[pc-filter-sql-fragment-alias-example]]
@@ -1425,18 +1425,18 @@ Certain methods of the Jakarta Persistence `EntityManager` or the Hibernate `Ses
Rolling back the database transaction does not put your business objects back into the state they were at the start of the transaction. This means that the database state and the business objects will be out of sync. Usually, this is not a problem because exceptions are not recoverable and you will have to start over after rollback anyway.
The Jakarta Persistence {jpaJavadocUrlPrefix}PersistenceException.html[`PersistenceException`] or the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/HibernateException.html[`HibernateException`] wraps most of the errors that can occur in a Hibernate persistence layer.
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/HibernateException.html[`HibernateException`] wraps most of the errors that can occur in a Hibernate persistence layer.
Both the `PersistenceException` and the `HibernateException` are runtime exceptions because, in our opinion, we should not force the application developer to catch an unrecoverable exception at a low layer. In most systems, unchecked and fatal exceptions are handled in one of the first frames of the method call stack (i.e., in higher layers) and either an error message is presented to the application user or some other appropriate action is taken. Note that Hibernate might also throw other unchecked exceptions that are not a `HibernateException`. These are not recoverable either, and appropriate action should be taken.
Hibernate wraps the JDBC `SQLException`, thrown while interacting with the database, in a
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/JDBCException.html[`JDBCException`].
-In fact, Hibernate will attempt to convert the exception into a more meaningful subclass of `JDBCException`. The underlying `SQLException` is always available via https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/JDBCException.html#getSQLException--[`JDBCException.getSQLException()`]. Hibernate converts the `SQLException` into an appropriate JDBCException subclass using the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/exception/spi/SQLExceptionConverter.html[`SQLExceptionConverter`]
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/JDBCException.html[`JDBCException`].
+In fact, Hibernate will attempt to convert the exception into a more meaningful subclass of `JDBCException`. The underlying `SQLException` is always available via https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/JDBCException.html#getSQLException--[`JDBCException.getSQLException()`]. Hibernate converts the `SQLException` into an appropriate JDBCException subclass using the
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/exception/spi/SQLExceptionConverter.html[`SQLExceptionConverter`]
attached to the current `SessionFactory`.
By default, the `SQLExceptionConverter` is defined by the configured Hibernate `Dialect` via the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/dialect/Dialect.html#buildSQLExceptionConversionDelegate--[`buildSQLExceptionConversionDelegate`] method
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/dialect/Dialect.html#buildSQLExceptionConversionDelegate--[`buildSQLExceptionConversionDelegate`] method
which is overridden by several database-specific ``Dialect``s.
The standard `JDBCException` subtypes are:
@@ -1465,8 +1465,8 @@ SQLGrammarException::
====
Starting with Hibernate 5.2, the Hibernate `Session` extends the Jakarta Persistence `EntityManager`. For this reason, when a `SessionFactory` is built via Hibernate's native bootstrapping,
the `HibernateException` or `SQLException` can be wrapped in a Jakarta Persistence {jpaJavadocUrlPrefix}PersistenceException.html[`PersistenceException`] when thrown
-by `Session` methods that implement `EntityManager` methods (e.g., https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#merge-java.lang.Object-[Session.merge(Object object)],
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#flush--[Session.flush()]).
+by `Session` methods that implement `EntityManager` methods (e.g., https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#merge-java.lang.Object-[Session.merge(Object object)],
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/Session.html#flush--[Session.flush()]).
If your `SessionFactory` is built via Hibernate's native bootstrapping, and you don't want the Hibernate exceptions to be wrapped in the Jakarta Persistence `PersistenceException`, you need to set the
`hibernate.native_exception_handling_51_compliance` configuration property to `true`. See the
diff --git a/documentation/src/main/asciidoc/userguide/chapters/portability/Portability.adoc b/documentation/src/main/asciidoc/userguide/chapters/portability/Portability.adoc
index 21feacb3964d..f83352d0e858 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/portability/Portability.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/portability/Portability.adoc
@@ -14,8 +14,11 @@ Regardless of the exact scenario, the basic idea is that you want Hibernate to h
The first line of portability for Hibernate is the dialect, which is a specialization of the `org.hibernate.dialect.Dialect` contract.
A dialect encapsulates all the differences in how Hibernate must communicate with a particular database to accomplish some task like getting a sequence value or structuring a SELECT query.
-Hibernate bundles a wide range of dialects for many of the most popular databases.
-If you find that your particular database is not among them, it is not terribly difficult to write your own.
+
+Hibernate bundles a wide range of dialects for many of the most popular databases: see the link:{doc-dialect-url}[dialect guide] for details.
+If you find that your particular database is not among them,
+you can check link:{doc-dialect-url}#third-party-dialects[dialects implemented by third parties],
+and as a last resort it is not terribly difficult to write your own.
[[portability-dialectresolver]]
=== Dialect resolution
diff --git a/documentation/src/main/asciidoc/userguide/chapters/query/hql/Query.adoc b/documentation/src/main/asciidoc/userguide/chapters/query/hql/Query.adoc
index d009bab1c41a..948f66b630b9 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/query/hql/Query.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/query/hql/Query.adoc
@@ -87,7 +87,7 @@ include::{example-dir-model}/Person.java[tags=jpa-read-only-entities-native-exam
====
Alternatively, Hibernate offers an extended
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/NamedQuery.html[`@NamedQuery`] annotation
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/NamedQuery.html[`@NamedQuery`] annotation
which allows the specification of additional properties of the query, including flush mode, cacheability, and timeout interval, in a more typesafe way.
[[jpql-api-hibernate-named-query-example]]
@@ -348,7 +348,7 @@ On the other hand, `setHint()` refers to the Jakarta Persistence notion of a que
This is a completely different concept.
====
-For complete details, see the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/Query.html[Query] Javadocs.
+For complete details, see the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/Query.html[Query] Javadocs.
[[hql-api-basic-usage-example]]
.Advanced query control
@@ -364,7 +364,7 @@ include::{example-dir-query}/HQLTest.java[tags=hql-api-basic-usage-example]
A program may hook into the process of building the query results by providing a `org.hibernate.transform.ResultListTransformer` or `org.hibernate.transform.TupleTransformer`.
-See the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/transform/ResultListTransformer.html[Javadocs] along with the built-in implementations for additional details.
+See the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/transform/ResultListTransformer.html[Javadocs] along with the built-in implementations for additional details.
//[[hql-api-parameters]]
//==== Binding arguments to parameters
diff --git a/documentation/src/main/asciidoc/userguide/chapters/query/hql/QueryLanguage.adoc b/documentation/src/main/asciidoc/userguide/chapters/query/hql/QueryLanguage.adoc
index 9dddd502024a..416bdabe9027 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/query/hql/QueryLanguage.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/query/hql/QueryLanguage.adoc
@@ -880,7 +880,7 @@ There are some very important functions for working with dates and times.
The special function `extract()` obtains a single field of a date, time, or datetime.
Field types include: `day`, `month`, `year`, `second`, `minute`, `hour`, `day of week`, `day of month`, `week of year`, `date`, `time`, `epoch` and more.
-For a full list of field types, see the Javadoc for https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/query/TemporalUnit.html[`TemporalUnit`].
+For a full list of field types, see the Javadoc for https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/query/TemporalUnit.html[`TemporalUnit`].
====
[source, JAVA, indent=0]
@@ -918,7 +918,7 @@ This function formats a date, time, or datetime according to a pattern.
The syntax is `format(datetime as pattern)`, and the pattern must be written in a subset of the pattern language defined by Java's `java.time.format.DateTimeFormatter`.
-For a full list of `format()` pattern elements, see the Javadoc for https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/dialect/Dialect.html#appendDatetimeFormat[`Dialect#appendDatetimeFormat`].
+For a full list of `format()` pattern elements, see the Javadoc for https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/dialect/Dialect.html#appendDatetimeFormat[`Dialect#appendDatetimeFormat`].
[[hql-function-trunc-datetime]]
===== `trunc()` or `truncate()`
diff --git a/documentation/src/main/asciidoc/userguide/chapters/query/native/Native.adoc b/documentation/src/main/asciidoc/userguide/chapters/query/native/Native.adoc
index 7b4a99d273ea..452c498a33af 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/query/native/Native.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/query/native/Native.adoc
@@ -526,7 +526,7 @@ include::{doc-emeddable-example-dir}/SQLTest.java[tags=sql-hibernate-multiple-sc
----
====
-You can also use the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/NamedNativeQuery.html[`@NamedNativeQuery`] Hibernate annotation
+You can also use the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/NamedNativeQuery.html[`@NamedNativeQuery`] Hibernate annotation
to customize the named query using various configurations such as fetch mode, cacheability, time out interval.
[[sql-multiple-scalar-values-dto-NamedNativeQuery-hibernate-example]]
diff --git a/documentation/src/main/asciidoc/userguide/chapters/schema/Schema.adoc b/documentation/src/main/asciidoc/userguide/chapters/schema/Schema.adoc
index 61e030b59325..8c1e6ee1d06e 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/schema/Schema.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/schema/Schema.adoc
@@ -122,7 +122,7 @@ include::{extrasdir}/schema-generation-database-checks-persist-example.sql[]
[[schema-generation-column-default-value]]
=== Default value for a database column
-With Hibernate, you can specify a default value for a given database column using the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/ColumnDefault.html[`@ColumnDefault`] annotation.
+With Hibernate, you can specify a default value for a given database column using the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/ColumnDefault.html[`@ColumnDefault`] annotation.
[[schema-generation-column-default-value-mapping-example]]
.`@ColumnDefault` mapping example
@@ -163,7 +163,7 @@ include::{extrasdir}/schema-generation-column-default-value-persist-example.sql[
[TIP]
====
-If the column value should be generated not only when a row is inserted, but also when it's updated, the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/GeneratedColumn.html[`@GeneratedColumn`] annotation should be used.
+If the column value should be generated not only when a row is inserted, but also when it's updated, the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/annotations/GeneratedColumn.html[`@GeneratedColumn`] annotation should be used.
====
[[schema-generation-columns-unique-constraint]]
diff --git a/documentation/src/main/asciidoc/userguide/chapters/statistics/Statistics.adoc b/documentation/src/main/asciidoc/userguide/chapters/statistics/Statistics.adoc
index a520e950dec7..9707a565570a 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/statistics/Statistics.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/statistics/Statistics.adoc
@@ -19,7 +19,7 @@ By default, the statistics are not collected because this incurs an additional p
=== org.hibernate.stat.Statistics methods
The Hibernate statistics are made available via the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/stat/Statistics.html[`Statistics`] interface which exposes the following methods:
+https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/stat/Statistics.html[`Statistics`] interface which exposes the following methods:
[[statistics-general]]
==== General statistics methods
diff --git a/documentation/src/main/asciidoc/userguide/chapters/tooling/modelgen.adoc b/documentation/src/main/asciidoc/userguide/chapters/tooling/modelgen.adoc
index 99c0dcd9bc1b..044127c1c005 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/tooling/modelgen.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/tooling/modelgen.adoc
@@ -9,7 +9,7 @@
:ann-proc: https://docs.oracle.com/en/java/javase/11/tools/javac.html#GUID-082C33A5-CBCA-471A-845E-E77F79B7B049__GUID-3FA757C8-B67B-46BC-AEF9-7C3FFB126A93
:ann-proc-path: https://docs.oracle.com/en/java/javase/11/tools/javac.html#GUID-AEEC9F07-CB49-4E96-8BC7-BCC2C7F725C9__GUID-214E175F-0F06-4CDC-B511-5BA469955F5A
:ann-proc-options: https://docs.oracle.com/en/java/javase/11/tools/javac.html#GUID-AEEC9F07-CB49-4E96-8BC7-BCC2C7F725C9__GUID-6CC814A4-8A29-434A-B7E1-DF8234784E7C
-:intg-guide: https://docs.jboss.org/hibernate/orm/6.3/introduction/html_single/Hibernate_Introduction.html#generator
+:intg-guide: https://docs.hibernate.org/orm/{majorMinorVersion}/introduction/html_single/#generator
Jakarta Persistence defines a typesafe Criteria API which allows <>
queries to be constructed in a strongly-typed manner, utilizing so-called static metamodel
diff --git a/documentation/src/main/asciidoc/userguide/chapters/transactions/Transactions.adoc b/documentation/src/main/asciidoc/userguide/chapters/transactions/Transactions.adoc
index 028d770c752e..f34103fe78b9 100644
--- a/documentation/src/main/asciidoc/userguide/chapters/transactions/Transactions.adoc
+++ b/documentation/src/main/asciidoc/userguide/chapters/transactions/Transactions.adoc
@@ -42,7 +42,7 @@ or provide a custom `org.hibernate.resource.transaction.TransactionCoordinatorBu
[NOTE]
====
For details on implementing a custom `TransactionCoordinatorBuilder`, or simply better understanding how it works, see the
-https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/integrationguide/html_single/Hibernate_Integration_Guide.html[Integration Guide] .
+https://docs.hibernate.org/orm/{majorMinorVersion}/integrationguide/html_single/Hibernate_Integration_Guide.html[Integration Guide] .
====
Hibernate uses JDBC connections and JTA resources directly, without adding any additional locking behavior.
@@ -164,7 +164,7 @@ However, as of version 3.1, the processing behind `SessionFactory.getCurrentSess
To that end, a new extension interface, `org.hibernate.context.spi.CurrentSessionContext`,
and a new configuration parameter, `hibernate.current_session_context_class`, have been added to allow pluggability of the scope and context of defining current sessions.
-See the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/context/spi/CurrentSessionContext.html[Javadocs] for the `org.hibernate.context.spi.CurrentSessionContext` interface for a detailed discussion of its contract.
+See the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/context/spi/CurrentSessionContext.html[Javadocs] for the `org.hibernate.context.spi.CurrentSessionContext` interface for a detailed discussion of its contract.
It defines a single method, `currentSession()`, by which the implementation is responsible for tracking the current contextual session.
Out-of-the-box, Hibernate comes with three implementations of this interface:
@@ -172,7 +172,7 @@ Out-of-the-box, Hibernate comes with three implementations of this interface:
current sessions are tracked and scoped by a `JTA` transaction.
The processing here is exactly the same as in the older JTA-only approach.
`org.hibernate.context.internal.ThreadLocalSessionContext`::
-current sessions are tracked by thread of execution. See the https://docs.jboss.org/hibernate/orm/{majorMinorVersion}/javadocs/org/hibernate/context/internal/ThreadLocalSessionContext.html[Javadocs] for more details.
+current sessions are tracked by thread of execution. See the https://docs.hibernate.org/orm/{majorMinorVersion}/javadocs/org/hibernate/context/internal/ThreadLocalSessionContext.html[Javadocs] for more details.
`org.hibernate.context.internal.ManagedSessionContext`::
current sessions are tracked by thread of execution.
However, you are responsible to bind and unbind a `Session` instance with static methods on this class; it does not open, flush, or close a `Session`.
diff --git a/documentation/src/main/style/asciidoctor/js/toc.js b/documentation/src/main/style/asciidoctor/js/toc.js
index ec434553b538..e217934c673c 100644
--- a/documentation/src/main/style/asciidoctor/js/toc.js
+++ b/documentation/src/main/style/asciidoctor/js/toc.js
@@ -24,7 +24,7 @@ $(document).ready(function() {
$('#vchooser').append('');
for(var version in versions) {
- var path = 'http://docs.jboss.org/hibernate/orm' + versions[version];
+ var path = 'https://docs.hibernate.org/orm' + versions[version];
$('#vchooser').append('');
};
diff --git a/gradle/databases.gradle b/gradle/databases.gradle
index d00a6fbf689c..0bac132bbbb0 100644
--- a/gradle/databases.gradle
+++ b/gradle/databases.gradle
@@ -12,6 +12,8 @@ ext {
db = project.hasProperty('db') ? project.getProperty('db') : 'h2'
dbHost = System.getProperty( 'dbHost', 'localhost' )
dbService = System.getProperty( 'dbService', '' )
+ dbPassword = System.getProperty( 'dbPassword', '' ).replace('"', '')
+ dbConnectionStringSuffix = System.getProperty( 'dbConnectionStringSuffix', '' ).replace('"', '')
runID = System.getProperty( 'runID', '' )
dbBundle = [
h2 : [
@@ -79,13 +81,13 @@ ext {
],
edb_ci : [
'db.dialect' : 'org.hibernate.dialect.PostgresPlusDialect',
- 'jdbc.driver': 'org.postgresql.Driver',
+ 'jdbc.driver': 'com.edb.Driver',
'jdbc.user' : 'hibernate_orm_test',
'jdbc.pass' : 'hibernate_orm_test',
// Disable prepared statement caching due to https://www.postgresql.org/message-id/CAEcMXhmmRd4-%2BNQbnjDT26XNdUoXdmntV9zdr8%3DTu8PL9aVCYg%40mail.gmail.com
- 'jdbc.url' : 'jdbc:postgresql://' + dbHost + '/hibernate_orm_test?preparedStatementCacheQueries=0&escapeSyntaxCallMode=callIfNoReturn',
- 'jdbc.datasource' : 'org.postgresql.Driver',
-// 'jdbc.datasource' : 'org.postgresql.ds.PGSimpleDataSource',
+ 'jdbc.url' : 'jdbc:edb://' + dbHost + '/hibernate_orm_test?preparedStatementCacheQueries=0&escapeSyntaxCallMode=callIfNoReturn',
+ 'jdbc.datasource' : 'com.edb.Driver',
+// 'jdbc.datasource' : 'com.edb.ds.PGSimpleDataSource',
'connection.init_sql' : ''
],
sybase_ci : [
@@ -270,6 +272,15 @@ ext {
// 'jdbc.datasource' : 'oracle.jdbc.datasource.impl.OracleDataSource',
'connection.init_sql' : ''
],
+ oracle_test_pilot_database: [
+ 'db.dialect' : 'org.hibernate.dialect.OracleDialect',
+ 'jdbc.driver': 'oracle.jdbc.OracleDriver',
+ 'jdbc.user' : 'hibernate_orm_test_' + runID,
+ 'jdbc.pass' : dbPassword,
+ 'jdbc.url' : 'jdbc:oracle:thin:@' + dbConnectionStringSuffix + '?oracle.jdbc.enableQueryResultCache=false',
+ 'jdbc.datasource' : 'oracle.jdbc.OracleDriver',
+ 'connection.init_sql' : ''
+ ],
mssql : [
'db.dialect' : 'org.hibernate.dialect.SQLServerDialect',
'jdbc.driver': 'com.microsoft.sqlserver.jdbc.SQLServerDriver',
@@ -295,7 +306,7 @@ ext {
'jdbc.driver': 'com.informix.jdbc.IfxDriver',
'jdbc.user' : 'informix',
'jdbc.pass' : 'in4mix',
- 'jdbc.url' : 'jdbc:informix-sqli://' + dbHost + ':9088/dev:INFORMIXSERVER=informix;user=informix;password=in4mix;DELIMIDENT=Y;DB_LOCALE=en_US.utf8',
+ 'jdbc.url' : 'jdbc:informix-sqli://' + dbHost + ':9088/dev:INFORMIXSERVER=informix;user=informix;password=in4mix;DBDATE=Y4MD-;DELIMIDENT=Y;DB_LOCALE=en_US.utf8',
'jdbc.datasource' : 'com.informix.jdbc.IfxDriver',
// 'jdbc.datasource' : 'com.informix.jdbcx.IfxDataSource',
'connection.init_sql' : ''
diff --git a/gradle/gradle-develocity.gradle b/gradle/gradle-develocity.gradle
index 26422d767186..0a9e8c98e128 100644
--- a/gradle/gradle-develocity.gradle
+++ b/gradle/gradle-develocity.gradle
@@ -6,12 +6,13 @@
*/
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-// Applies details for `https://ge.hibernate.org`
+// Applies details for `https://develocity.commonhaus.dev`
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ext {
isCiEnvironment = isJenkins() || isGitHubActions() || isGenericCi()
- populateRemoteBuildCache = isEnabled( "POPULATE_REMOTE_GRADLE_CACHE" )
+ populateRemoteBuildCache = getSetting('POPULATE_REMOTE_GRADLE_CACHE').orElse('false').toBoolean()
+ useRemoteCache = !getSetting('DISABLE_REMOTE_GRADLE_CACHE').orElse('false').toBoolean()
}
private static boolean isJenkins() {
@@ -36,16 +37,8 @@ static java.util.Optional getSetting(String name) {
return java.util.Optional.ofNullable(sysProp);
}
-static boolean isEnabled(String setting) {
- if ( System.getenv().hasProperty( setting ) ) {
- return true
- }
-
- return System.hasProperty( setting )
-}
-
develocity {
- server = 'https://ge.hibernate.org'
+ server = 'https://develocity.commonhaus.dev'
buildScan {
capture {
diff --git a/gradle/java-module.gradle b/gradle/java-module.gradle
index a840f90db063..733142837616 100644
--- a/gradle/java-module.gradle
+++ b/gradle/java-module.gradle
@@ -46,6 +46,11 @@ apply plugin: 'project-report'
apply plugin: 'org.jetbrains.gradle.plugin.idea-ext'
+def skipJacoco = project.hasProperty('skipJacoco') ? project.getProperty('skipJacoco').toBoolean() : false
+if (!skipJacoco) {
+ plugins.apply('jacoco')
+}
+
ext {
java9ModuleNameBase = project.name.startsWith( 'hibernate-' ) ? name.drop( 'hibernate-'.length() ): name
java9ModuleName = "org.hibernate.orm.$project.java9ModuleNameBase".replace('-','.')
@@ -90,6 +95,7 @@ dependencies {
testImplementation testLibs.byteman
+ testRuntimeOnly testLibs.junit5Launcher
testRuntimeOnly testLibs.log4j2
testRuntimeOnly libs.byteBuddy
@@ -99,6 +105,7 @@ dependencies {
testRuntimeOnly dbLibs.derbyTools
testRuntimeOnly dbLibs.hsqldb
testRuntimeOnly dbLibs.postgresql
+ testRuntimeOnly dbLibs.edb
testRuntimeOnly dbLibs.mssql
testRuntimeOnly dbLibs.informix
testRuntimeOnly dbLibs.cockroachdb
@@ -245,6 +252,24 @@ tasks.withType( Test.class ).each { test ->
excludeTestsMatching project.property('excludeTests').toString()
}
}
+ if (!skipJacoco) {
+ def coverageReportSuffix = providers.gradleProperty('db').orElse('default')
+ .flatMap { db ->
+ // Oracle DBs tested on OTP are all using the same db name no matter the version...
+ // this would be a problem for creating/merging Jacoco reports, hence:
+ if ("oracle_test_pilot_database" == db) {
+ return providers.environmentVariable("RDBMS").orElse(db)
+ } else {
+ return providers.provider { db }
+ }
+ }
+ def javaVersion = JavaVersion.current().majorVersion
+ test.jacoco {
+ destinationFile = layout.buildDirectory.file(
+ coverageReportSuffix.map { db -> "jacoco/report-${db}-jdk${javaVersion}.exec" }
+ ).get().asFile
+ }
+ }
}
sourceSets {
diff --git a/gradle/published-java-module.gradle b/gradle/published-java-module.gradle
index b9ea71617b22..45791fabf74c 100644
--- a/gradle/published-java-module.gradle
+++ b/gradle/published-java-module.gradle
@@ -5,15 +5,9 @@
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
-apply from: rootProject.file( 'gradle/releasable.gradle' )
apply from: rootProject.file( 'gradle/java-module.gradle' )
apply from: rootProject.file( 'gradle/publishing-pom.gradle' )
-apply plugin: 'signing'
-
-// Make sure that the publishReleaseArtifacts task of the release module runs the release task of this sub module
-tasks.getByPath( ':release:publishReleaseArtifacts' ).dependsOn tasks.release
-
configurations {
javadocSources {
description 'Used to aggregate javadocs for the whole project'
@@ -21,6 +15,7 @@ configurations {
}
dependencies {
+ // `javadocSources` is a special Configuration which is used as a basis for the aggregated-javadocs we produce
javadocSources sourceSets.main.allJava
}
@@ -29,6 +24,8 @@ dependencies {
// Publishing
java {
+ // Configure the Java "software component" to include javadoc and sources jars in addition to the classes jar.
+ // Ultimately, this component is what makes up the publication for this project.
withJavadocJar()
withSourcesJar()
}
@@ -97,150 +94,26 @@ publishing {
}
}
-
-var signingKey = resolveSigningKey()
-var signingPassword = findSigningProperty( "signingPassword" )
-
-signing {
- useInMemoryPgpKeys( signingKey, signingPassword )
-
- sign publishing.publications.publishedArtifacts
-}
-
-String resolveSigningKey() {
- var key = findSigningProperty( "signingKey" )
- if ( key != null ) {
- return key
- }
-
- var keyFile = findSigningProperty( "signingKeyFile" )
- if ( keyFile != null ) {
- return new File( keyFile ).text
- }
-
- return null
-}
-
-String findSigningProperty(String propName) {
- if ( System.getProperty( propName ) != null ) {
- logger.debug "Found `{}` as a system property", propName
- return System.getProperty(propName )
- }
- else if ( System.getenv().get( propName ) != null ) {
- logger.debug "Found `{}` as an env-var property", propName
- return System.getenv().get( propName )
- }
- else if ( project.hasProperty( propName ) ) {
- logger.debug "Found `{}` as a project property", propName
- return project.hasProperty( propName )
- }
- else {
- logger.debug "Did not find `{}`", propName
- return null
- }
-}
-
-
-var signingTask = project.tasks.getByName( "signPublishedArtifactsPublication" ) as Sign
-var signingExtension = project.getExtensions().getByType(SigningExtension) as SigningExtension
-
-task sign {
- dependsOn "signPublications"
-}
-
-task signPublications { t ->
- tasks.withType( Sign ).all { s ->
- t.dependsOn s
- }
-}
-
-signingTask.doFirst {
- if ( signingKey == null || signingPassword == null ) {
- throw new GradleException(
- "Cannot perform signing without GPG details. Please set the `signingKey` and `signingKeyFile` properties"
- )
- }
-}
-
-
-boolean wasSigningExplicitlyRequested() {
- // check whether signing task was explicitly requested when running the build
- //
- // NOTE: due to https://discuss.gradle.org/t/how-to-tell-if-a-task-was-explicitly-asked-for-on-the-command-line/42853/3
- // we cannot definitively know whether the task was requested. Gradle really just does not expose this information.
- // so we make a convention - we check the "start parameters" object to see which task-names were requested;
- // the problem is that these are the raw names directly from the command line. e.g. it is perfectly legal to
- // say `gradlew signPubArtPub` in place of `gradlew signPublishedArtifactsPublication` - Gradle will simply
- // "expand" the name it finds. However, it does not make that available.
- //
- // so the convention is that we will check for the following task names
- //
- // for each of:
- // 1. `sign`
- // 2. `signPublications`
- // 3. `signPublishedArtifactsPublication`
- //
- // and we check both forms:
- // 1. "${taskName}"
- // 2. project.path + ":${taskName}"
- //
- // we need to check both again because of the "start parameters" discussion
-
- def signingTaskNames = ["sign", "signPublications", "signPublishedArtifactsPublication"]
-
- for ( String taskName : signingTaskNames ) {
- if ( gradle.startParameter.taskNames.contains( taskName )
- || gradle.startParameter.taskNames.contains( "${project.path}:${taskName}" ) ) {
- return true
- }
- }
-
- return false
-}
-
-if ( wasSigningExplicitlyRequested() ) {
- // signing was explicitly requested
- signingExtension.required = true
-}
-else {
- gradle.taskGraph.whenReady { graph ->
- if ( graph.hasTask( signingTask ) ) {
- // signing is scheduled to happen.
- //
- // we know, from above if-check, that it was not explicitly requested -
- // so it is triggered via task dependency. make sure we want it to happen
- var publishingTask = project.tasks.getByName( "publishPublishedArtifactsPublicationToSonatypeRepository" ) as PublishToMavenRepository
- if ( graph.hasTask( publishingTask ) ) {
- // we are publishing to Sonatype OSSRH - we need the signing to happen
- signingExtension.required = true
- }
- else {
- // signing was not explicitly requested and we are not publishing to OSSRH,
- // so do not sign.
- signingTask.enabled = false
- }
- }
-
- }
-}
-
-
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Release / publishing tasks
task ciBuild {
- dependsOn test, tasks.publishToSonatype
+ dependsOn test
}
-tasks.release.dependsOn tasks.test, tasks.publishToSonatype
-
-tasks.preVerifyRelease.dependsOn build
-tasks.preVerifyRelease.dependsOn generateMetadataFileForPublishedArtifactsPublication
-tasks.preVerifyRelease.dependsOn generatePomFileForPublishedArtifactsPublication
-tasks.preVerifyRelease.dependsOn generatePomFileForRelocationPomPublication
-
-tasks.publishToSonatype.mustRunAfter test
-
+task releasePrepare {
+ group 'Release'
+ description 'Performs release preparations on local check-out, including updating changelog'
+
+ dependsOn build
+ dependsOn generateMetadataFileForPublishedArtifactsPublication
+ dependsOn generatePomFileForPublishedArtifactsPublication
+ dependsOn generatePomFileForRelocationPomPublication
+ dependsOn generatePomFileForRelocationPomPublication
+ // we depend on publishAllPublicationsToStagingRepository to make sure that the artifacts are "published" to a local staging directory
+ // used by JReleaser during the release process
+ dependsOn publishAllPublicationsToStagingRepository
+}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Ancillary tasks
@@ -254,4 +127,4 @@ task showPublications {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/gradle/publishing-pom.gradle b/gradle/publishing-pom.gradle
index 4654f0d873e1..cb8bfa7f72b5 100644
--- a/gradle/publishing-pom.gradle
+++ b/gradle/publishing-pom.gradle
@@ -59,5 +59,18 @@ publishing {
}
}
+ repositories {
+ maven {
+ name = "staging"
+ url = rootProject.layout.buildDirectory.dir("staging-deploy${File.separator}maven")
+ }
+ maven {
+ name = 'snapshots'
+ url = "https://central.sonatype.com/repository/maven-snapshots/"
+ // So that Gradle uses the `ORG_GRADLE_PROJECT_snapshotsPassword` / `ORG_GRADLE_PROJECT_snapshotsUsername`
+ // env variables to read the username/password for the `snapshots` repository publishing:
+ credentials(PasswordCredentials)
+ }
+ }
}
diff --git a/gradle/releasable.gradle b/gradle/releasable.gradle
deleted file mode 100644
index eb5052d02db7..000000000000
--- a/gradle/releasable.gradle
+++ /dev/null
@@ -1,10 +0,0 @@
-apply from: rootProject.file( 'gradle/base-information.gradle' )
-
-task release {
- mustRunAfter ':release:releaseChecks'
- enabled !project.ormVersion.isSnapshot
-}
-
-task preVerifyRelease {
- dependsOn ':release:preVerifyRelease'
-}
diff --git a/gradle/version.properties b/gradle/version.properties
index f61ae8f56a8b..a1568ee1fcc3 100644
--- a/gradle/version.properties
+++ b/gradle/version.properties
@@ -1 +1 @@
-hibernateVersion=6.6.1-SNAPSHOT
\ No newline at end of file
+hibernateVersion=6.6.47-SNAPSHOT
\ No newline at end of file
diff --git a/hibernate-agroal/src/main/java/org/hibernate/agroal/internal/AgroalConnectionProvider.java b/hibernate-agroal/src/main/java/org/hibernate/agroal/internal/AgroalConnectionProvider.java
index a96e42277c28..9ef17546b8b6 100644
--- a/hibernate-agroal/src/main/java/org/hibernate/agroal/internal/AgroalConnectionProvider.java
+++ b/hibernate-agroal/src/main/java/org/hibernate/agroal/internal/AgroalConnectionProvider.java
@@ -9,10 +9,11 @@
import java.sql.Connection;
import java.sql.SQLException;
+import java.sql.DatabaseMetaData;
+import javax.sql.DataSource;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
-import javax.sql.DataSource;
import org.hibernate.HibernateException;
import org.hibernate.cfg.AgroalSettings;
@@ -36,6 +37,7 @@
import io.agroal.api.security.SimplePassword;
import static org.hibernate.cfg.AgroalSettings.AGROAL_CONFIG_PREFIX;
+import static org.hibernate.engine.jdbc.env.internal.JdbcEnvironmentInitiator.allowJdbcMetadataAccess;
/**
* ConnectionProvider based on Agroal connection pool
@@ -64,6 +66,7 @@ public class AgroalConnectionProvider implements ConnectionProvider, Configurabl
public static final String CONFIG_PREFIX = AGROAL_CONFIG_PREFIX + ".";
private static final long serialVersionUID = 1L;
private AgroalDataSource agroalDataSource = null;
+ private boolean isMetadataAccessAllowed = true;
// --- Configurable
@@ -92,6 +95,8 @@ private static void copyProperty(Map properties, String key,
@Override
public void configure(Map props) throws HibernateException {
+ isMetadataAccessAllowed = allowJdbcMetadataAccess( props );
+
ConnectionInfoLogger.INSTANCE.configureConnectionPool( "Agroal" );
try {
AgroalPropertiesReader agroalProperties = new AgroalPropertiesReader( CONFIG_PREFIX )
@@ -139,9 +144,12 @@ public DatabaseConnectionInfo getDatabaseConnectionInfo(Dialect dialect) {
final AgroalConnectionPoolConfiguration acpc = agroalDataSource.getConfiguration().connectionPoolConfiguration();
final AgroalConnectionFactoryConfiguration acfc = acpc.connectionFactoryConfiguration();
+
return new DatabaseConnectionInfoImpl(
acfc.jdbcUrl(),
- acfc.connectionProviderClass().toString(),
+ // Attempt to resolve the driver name from the dialect, in case it wasn't explicitly set and access to
+ // the database metadata is allowed
+ acfc.connectionProviderClass() != null ? acfc.connectionProviderClass().toString() : extractDriverNameFromMetadata(),
dialect.getVersion(),
Boolean.toString( acfc.autoCommit() ),
acfc.jdbcTransactionIsolation() != null
@@ -152,6 +160,19 @@ public DatabaseConnectionInfo getDatabaseConnectionInfo(Dialect dialect) {
);
}
+ private String extractDriverNameFromMetadata() {
+ if (isMetadataAccessAllowed) {
+ try ( Connection conn = getConnection() ) {
+ DatabaseMetaData dbmd = conn.getMetaData();
+ return dbmd.getDriverName();
+ }
+ catch (SQLException e) {
+ // Do nothing
+ }
+ }
+ return null;
+ }
+
@Override
public boolean isUnwrappableAs(Class> unwrapType) {
return ConnectionProvider.class.equals( unwrapType )
diff --git a/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/C3P0ConnectionProviderTest.java b/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/C3P0ConnectionProviderTest.java
index 7b3a2f519a75..ebe721c90c99 100644
--- a/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/C3P0ConnectionProviderTest.java
+++ b/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/C3P0ConnectionProviderTest.java
@@ -15,11 +15,13 @@
import org.hibernate.c3p0.internal.C3P0ConnectionProvider;
import org.hibernate.cfg.Environment;
+import org.hibernate.dialect.SybaseASEDialect;
import org.hibernate.engine.jdbc.env.internal.JdbcEnvironmentInitiator.ConnectionProviderJdbcConnectionAccess;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
+import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.Test;
import static org.hibernate.testing.junit4.ExtraAssertions.assertTyping;
@@ -29,6 +31,8 @@
/**
* @author Strong Liu
*/
+@SkipForDialect(dialectClass = SybaseASEDialect.class,
+ reason = "JtdsConnection.isValid not implemented")
public class C3P0ConnectionProviderTest extends BaseCoreFunctionalTestCase {
@Override
diff --git a/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/C3p0TransactionIsolationConfigTest.java b/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/C3p0TransactionIsolationConfigTest.java
index 3c83c192eb46..138ec195567c 100644
--- a/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/C3p0TransactionIsolationConfigTest.java
+++ b/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/C3p0TransactionIsolationConfigTest.java
@@ -10,6 +10,7 @@
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.c3p0.internal.C3P0ConnectionProvider;
import org.hibernate.community.dialect.AltibaseDialect;
+import org.hibernate.dialect.SybaseASEDialect;
import org.hibernate.dialect.TiDBDialect;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.service.spi.ServiceRegistryImplementor;
@@ -23,6 +24,7 @@
*/
@SkipForDialect(value = TiDBDialect.class, comment = "Doesn't support SERIALIZABLE isolation")
@SkipForDialect(value = AltibaseDialect.class, comment = "Altibase cannot change isolation level in autocommit mode")
+@SkipForDialect(value = SybaseASEDialect.class, comment = "JtdsConnection.isValid not implemented")
public class C3p0TransactionIsolationConfigTest extends BaseTransactionIsolationConfigTest {
private StandardServiceRegistry ssr;
diff --git a/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/StatementCacheTest.java b/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/StatementCacheTest.java
index f079f9beb007..7c2445aa5c84 100644
--- a/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/StatementCacheTest.java
+++ b/hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/StatementCacheTest.java
@@ -11,10 +11,13 @@
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
+import org.hibernate.dialect.SQLServerDialect;
+import org.hibernate.dialect.SybaseASEDialect;
+import org.hibernate.testing.orm.junit.JiraKey;
+import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.Assert;
import org.junit.Test;
-import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
/**
@@ -22,9 +25,15 @@
*
* @author Shawn Clowater
*/
+@SkipForDialect(dialectClass = SybaseASEDialect.class,
+ reason = "JtdsConnection.isValid not implemented")
+@SkipForDialect(dialectClass = SQLServerDialect.class,
+ reason = "started failing after upgrade to c3p0 0.10")
public class StatementCacheTest extends BaseCoreFunctionalTestCase {
@Test
- @TestForIssue( jiraKey = "HHH-7193" )
+ @JiraKey(value = "HHH-7193")
+ @SkipForDialect(dialectClass = SQLServerDialect.class,
+ reason = "started failing after upgrade to c3p0 0.10")
public void testStatementCaching() {
inSession(
session -> {
@@ -58,7 +67,8 @@ public void testStatementCaching() {
}
);
- //only one entity should have been inserted to the database (if the statement in the cache wasn't cleared then it would have inserted both entities)
+ // only one entity should have been inserted to the database
+ // (if the statement in the cache wasn't cleared then it would have inserted both entities)
inTransaction(
session -> {
CriteriaBuilder criteriaBuilder = session.getCriteriaBuilder();
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/AltibaseDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/AltibaseDialect.java
index 09d6ccbb2877..b4dc15316072 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/AltibaseDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/AltibaseDialect.java
@@ -395,20 +395,26 @@ public String castPattern(CastType from, CastType to) {
}
break;
case INTEGER_BOOLEAN:
- result = BooleanDecoder.toIntegerBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "1", "0" )
+ : BooleanDecoder.toIntegerBoolean( from );
if ( result != null ) {
return result;
}
break;
case YN_BOOLEAN:
- result = BooleanDecoder.toYesNoBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "'Y'", "'N'" )
+ : BooleanDecoder.toYesNoBoolean( from );
if ( result != null ) {
return result;
}
break;
case BOOLEAN:
case TF_BOOLEAN:
- result = BooleanDecoder.toTrueFalseBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "'T'", "'F'" )
+ : BooleanDecoder.toTrueFalseBoolean( from );
if ( result != null ) {
return result;
}
@@ -704,4 +710,14 @@ public SQLExceptionConversionDelegate buildSQLExceptionConversionDelegate() {
};
}
+ @Override
+ public String getDual() {
+ return "dual";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual();
+ }
+
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/AltibaseSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/AltibaseSqlAstTranslator.java
index 0ab86ef3ef16..a258f1d977d3 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/AltibaseSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/AltibaseSqlAstTranslator.java
@@ -221,16 +221,6 @@ public void visitQueryPartTableReference(QueryPartTableReference tableReference)
emulateQueryPartTableReferenceColumnAliasing( tableReference );
}
- @Override
- protected String getDual() {
- return "dual";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual();
- }
-
@Override
protected boolean needsRecursiveKeywordInWithClause() {
return false;
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CUBRIDDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CUBRIDDialect.java
index d73897a57aa6..1c4a5fbbca13 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CUBRIDDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CUBRIDDialect.java
@@ -517,4 +517,16 @@ private void timediff(
sqlAppender.append( diffUnit.conversionFactor( toUnit, this ) );
}
+ @Override
+ public String getDual() {
+ //TODO: is this really needed?
+ //TODO: would "from table({0})" be better?
+ return "db_root";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual();
+ }
+
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CUBRIDSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CUBRIDSqlAstTranslator.java
index e187406b3baa..5c59c6a29de3 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CUBRIDSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CUBRIDSqlAstTranslator.java
@@ -80,16 +80,4 @@ protected boolean supportsRowValueConstructorSyntaxInInList() {
protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
-
- @Override
- protected String getDual() {
- //TODO: is this really needed?
- //TODO: would "from table({0})" be better?
- return "db_root";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual();
- }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CockroachLegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CockroachLegacyDialect.java
index c0b8b5840304..8d27ff9b5232 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CockroachLegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CockroachLegacyDialect.java
@@ -61,10 +61,11 @@
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.JavaObjectType;
+import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
+import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
+import org.hibernate.type.descriptor.jdbc.NClobJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsBinaryTypeJdbcType;
-import org.hibernate.type.descriptor.jdbc.VarbinaryJdbcType;
-import org.hibernate.type.descriptor.jdbc.VarcharJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.internal.NamedNativeEnumDdlTypeImpl;
@@ -391,9 +392,9 @@ protected void contributeCockroachTypes(TypeContributions typeContributions, Ser
}
// Force Blob binding to byte[] for CockroachDB
- jdbcTypeRegistry.addDescriptor( Types.BLOB, VarbinaryJdbcType.INSTANCE );
- jdbcTypeRegistry.addDescriptor( Types.CLOB, VarcharJdbcType.INSTANCE );
- jdbcTypeRegistry.addDescriptor( Types.NCLOB, VarcharJdbcType.INSTANCE );
+ jdbcTypeRegistry.addDescriptor( Types.BLOB, BlobJdbcType.MATERIALIZED );
+ jdbcTypeRegistry.addDescriptor( Types.CLOB, ClobJdbcType.MATERIALIZED );
+ jdbcTypeRegistry.addDescriptor( Types.NCLOB, NClobJdbcType.MATERIALIZED );
// The next two contributions are the same as for Postgresql
typeContributions.contributeJdbcType( ObjectNullAsBinaryTypeJdbcType.INSTANCE );
@@ -1042,6 +1043,8 @@ public boolean supportsOuterJoinForUpdate() {
@Override
public boolean useInputStreamToInsertBlob() {
+ // PG-JDBC treats setBinaryStream()/setCharacterStream() calls like bytea/varchar, which are not LOBs,
+ // so disable stream bindings for this dialect completely
return false;
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2LegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2LegacyDialect.java
index c0cebad8c745..cf7b73ba713b 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2LegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2LegacyDialect.java
@@ -70,6 +70,7 @@
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.procedure.internal.DB2CallableStatementSupport;
import org.hibernate.procedure.spi.CallableStatementSupport;
+import org.hibernate.query.sqm.CastType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.TemporalUnit;
import org.hibernate.query.sqm.mutation.internal.cte.CteInsertStrategy;
@@ -869,6 +870,20 @@ public boolean supportsLobValueChangePropagation() {
return false;
}
+ @Override
+ public boolean useConnectionToCreateLob() {
+ return false;
+ }
+
+ @Override
+ public boolean supportsNationalizedMethods() {
+ // See HHH-12753, HHH-18314, HHH-19201
+ // Old DB2 JDBC drivers do not support setNClob, setNCharcterStream or setNString.
+ // In more recent driver versions, some methods just delegate to the non-N variant, but others still fail.
+ // Ultimately, let's just avoid the N variant methods on DB2 altogether
+ return false;
+ }
+
@Override
public boolean doesReadCommittedCauseWritersToBlockReaders() {
return true;
@@ -1056,6 +1071,19 @@ public IdentityColumnSupport getIdentityColumnSupport() {
return DB2IdentityColumnSupport.INSTANCE;
}
+ /**
+ * @return {@code true} because we can use {@code select ... from new table (insert .... )}
+ */
+ @Override
+ public boolean supportsInsertReturning() {
+ return true;
+ }
+
+ @Override
+ public boolean supportsInsertReturningRowId() {
+ return false;
+ }
+
@Override
public boolean supportsValuesList() {
return true;
@@ -1141,6 +1169,16 @@ public String extractPattern(TemporalUnit unit) {
return super.extractPattern( unit );
}
+ @Override
+ public String castPattern(CastType from, CastType to) {
+ if ( from == CastType.STRING && to == CastType.BOOLEAN ) {
+ return "cast(?1 as ?2)";
+ }
+ else {
+ return super.castPattern( from, to );
+ }
+ }
+
@Override
public int getInExpressionCountLimit() {
return BIND_PARAMETERS_NUMBER_LIMIT;
@@ -1208,4 +1246,14 @@ public DmlTargetColumnQualifierSupport getDmlTargetColumnQualifierSupport() {
public boolean supportsFromClauseInUpdate() {
return getDB2Version().isSameOrAfter( 11 );
}
+
+ @Override
+ public String getDual() {
+ return "sysibm.sysdummy1";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual();
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2LegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2LegacySqlAstTranslator.java
index f8ef6b3fb263..f6e8718b5e26 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2LegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2LegacySqlAstTranslator.java
@@ -76,7 +76,7 @@ protected boolean supportsWithClauseInSubquery() {
}
@Override
- protected void renderTableReferenceJoins(TableGroup tableGroup) {
+ protected void renderTableReferenceJoins(TableGroup tableGroup, int swappedJoinIndex, boolean forceLeftJoin) {
// When we are in a recursive CTE, we can't render joins on DB2...
// See https://modern-sql.com/feature/with-recursive/db2/error-345-state-42836
if ( isInRecursiveQueryPart() ) {
@@ -103,7 +103,7 @@ protected void renderTableReferenceJoins(TableGroup tableGroup) {
}
}
else {
- super.renderTableReferenceJoins( tableGroup );
+ super.renderTableReferenceJoins( tableGroup, swappedJoinIndex, forceLeftJoin );
}
}
@@ -596,24 +596,19 @@ protected boolean supportsRowValueConstructorSyntax() {
return false;
}
- @Override
- protected boolean supportsRowValueConstructorSyntaxInInList() {
- return false;
- }
-
@Override
protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
@Override
- protected String getDual() {
- return "sysibm.dual";
+ protected boolean supportsRowValueConstructorSyntaxInInList() {
+ return false;
}
@Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual();
+ protected boolean supportsRowValueConstructorSyntaxInInSubQuery() {
+ return true;
}
@Override
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2iLegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2iLegacyDialect.java
index 607efd39fb3e..507127bbbd84 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2iLegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2iLegacyDialect.java
@@ -9,6 +9,7 @@
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.function.CommonFunctionFactory;
+import org.hibernate.dialect.function.DB2SubstringFunction;
import org.hibernate.dialect.identity.DB2390IdentityColumnSupport;
import org.hibernate.dialect.identity.DB2IdentityColumnSupport;
import org.hibernate.dialect.identity.IdentityColumnSupport;
@@ -59,9 +60,14 @@ public DB2iLegacyDialect(DatabaseVersion version) {
@Override
public void initializeFunctionRegistry(FunctionContributions functionContributions) {
- super.initializeFunctionRegistry(functionContributions);
+ super.initializeFunctionRegistry( functionContributions );
+ // DB2 for i doesn't allow code units: https://www.ibm.com/docs/en/i/7.1.0?topic=functions-substring
+ functionContributions.getFunctionRegistry().register(
+ "substring",
+ new DB2SubstringFunction( false, functionContributions.getTypeConfiguration() )
+ );
if ( getVersion().isSameOrAfter( 7, 2 ) ) {
- CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
+ CommonFunctionFactory functionFactory = new CommonFunctionFactory( functionContributions );
functionFactory.listagg( null );
functionFactory.inverseDistributionOrderedSetAggregates();
functionFactory.hypotheticalOrderedSetAggregates_windowEmulation();
@@ -112,7 +118,7 @@ public SequenceSupport getSequenceSupport() {
@Override
public String getQuerySequencesString() {
if ( getVersion().isSameOrAfter(7,3) ) {
- return "select distinct sequence_name from qsys2.syssequences " +
+ return "select distinct sequence_schema as seqschema, sequence_name as seqname, START, minimum_value as minvalue, maximum_value as maxvalue, increment from qsys2.syssequences " +
"where current_schema='*LIBL' and sequence_schema in (select schema_name from qsys2.library_list_info) " +
"or sequence_schema=current_schema";
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2iLegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2iLegacySqlAstTranslator.java
index 815f5561bd0f..6cbc9bb86f24 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2iLegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2iLegacySqlAstTranslator.java
@@ -6,12 +6,15 @@
*/
package org.hibernate.community.dialect;
+import java.util.List;
+
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
+import org.hibernate.sql.ast.tree.expression.SqlTupleContainer;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.exec.spi.JdbcOperation;
@@ -42,15 +45,15 @@ protected boolean shouldEmulateFetchClause(QueryPart queryPart) {
if ( useOffsetFetchClause( queryPart ) && !isRowsOnlyFetchClauseType( queryPart ) ) {
return true;
}
- // According to LegacyDB2LimitHandler, variable limit also isn't supported before 7.10
- return version.isBefore(7, 10)
+ // According to LegacyDB2LimitHandler, variable limit also isn't supported before 7.1
+ return version.isBefore(7, 1)
&& queryPart.getFetchClauseExpression() != null
&& !( queryPart.getFetchClauseExpression() instanceof Literal );
}
@Override
protected boolean supportsOffsetClause() {
- return version.isSameOrAfter(7, 10);
+ return version.isSameOrAfter(7, 1);
}
@Override
@@ -58,6 +61,20 @@ protected void renderComparison(Expression lhs, ComparisonOperator operator, Exp
renderComparisonStandard( lhs, operator, rhs );
}
+ @Override
+ protected void renderExpressionsAsValuesSubquery(int tupleSize, List listExpressions) {
+ // DB2 for i supports type-inference in this special VALUES expression, but not if it's wrapped as SELECT
+ appendSql( "values" );
+ char separator = ' ';
+ for ( Expression expression : listExpressions ) {
+ appendSql( separator );
+ appendSql( OPEN_PARENTHESIS );
+ renderCommaSeparated( SqlTupleContainer.getSqlTuple( expression ).getExpressions() );
+ appendSql( CLOSE_PARENTHESIS );
+ separator = ',';
+ }
+ }
+
@Override
public DatabaseVersion getDB2Version() {
return DB2_LUW_VERSION9;
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2zLegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2zLegacyDialect.java
index 6e8315dc0899..ad7b07daacf8 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2zLegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2zLegacyDialect.java
@@ -251,4 +251,10 @@ public int rowIdSqlType() {
public String getRowIdColumnString(String rowId) {
return rowId( rowId ) + " rowid not null generated always";
}
+
+ @Override
+ public boolean supportsValuesList() {
+ // DB2 z/OS has a VALUES statement, but that doesn't support multiple values
+ return false;
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2zLegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2zLegacySqlAstTranslator.java
index 8d8fb58c9b02..0dccd01de6f5 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2zLegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DB2zLegacySqlAstTranslator.java
@@ -90,6 +90,12 @@ protected String getNewTableChangeModifier() {
return "final";
}
+ @Override
+ protected boolean preferUnionQueryForTupleInListPredicate() {
+ // DB2 z/OS can't use an index when rendering a union query
+ return false;
+ }
+
@Override
public DatabaseVersion getDB2Version() {
return DB2_LUW_VERSION9;
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyLegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyLegacyDialect.java
index 70e098ece421..df1b6bfa4df1 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyLegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyLegacyDialect.java
@@ -1055,6 +1055,11 @@ public boolean supportsWindowFunctions() {
return getVersion().isSameOrAfter( 10, 4 );
}
+ @Override
+ public boolean supportsValuesList() {
+ return true;
+ }
+
@Override
public IdentifierHelper buildIdentifierHelper(IdentifierHelperBuilder builder, DatabaseMetaData dbMetaData)
throws SQLException {
@@ -1066,4 +1071,14 @@ public IdentifierHelper buildIdentifierHelper(IdentifierHelperBuilder builder, D
public DmlTargetColumnQualifierSupport getDmlTargetColumnQualifierSupport() {
return DmlTargetColumnQualifierSupport.TABLE_ALIAS;
}
+
+ @Override
+ public String getDual() {
+ return "(values 0)";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual() + " dual";
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyLegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyLegacySqlAstTranslator.java
index 3aea1a5ebc1d..d3fee18516ff 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyLegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyLegacySqlAstTranslator.java
@@ -302,16 +302,6 @@ protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
- @Override
- protected String getDual() {
- return "(values 0)";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual() + " dual";
- }
-
@Override
protected boolean needsRowsToSkip() {
return !supportsOffsetFetchClause();
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/FirebirdDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/FirebirdDialect.java
index 6f47f288fa0e..380fea18a3f8 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/FirebirdDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/FirebirdDialect.java
@@ -403,25 +403,33 @@ public String castPattern(CastType from, CastType to) {
}
break;
case BOOLEAN:
- result = BooleanDecoder.toBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "true", "false" )
+ : BooleanDecoder.toBoolean( from );
if ( result != null ) {
return result;
}
break;
case INTEGER_BOOLEAN:
- result = BooleanDecoder.toIntegerBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "1", "0" )
+ : BooleanDecoder.toIntegerBoolean( from );
if ( result != null ) {
return result;
}
break;
case YN_BOOLEAN:
- result = BooleanDecoder.toYesNoBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "'Y'", "'N'" )
+ : BooleanDecoder.toYesNoBoolean( from );
if ( result != null ) {
return result;
}
break;
case TF_BOOLEAN:
- result = BooleanDecoder.toTrueFalseBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "'T'", "'F'" )
+ : BooleanDecoder.toTrueFalseBoolean( from );
if ( result != null ) {
return result;
}
@@ -1096,4 +1104,14 @@ else if ( supportsOffset && temporalAccessor instanceof Instant ) {
}
}
+
+ @Override
+ public String getDual() {
+ return "rdb$database";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual();
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/FirebirdSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/FirebirdSqlAstTranslator.java
index 6f9c1215fda9..84ca5355bfa3 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/FirebirdSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/FirebirdSqlAstTranslator.java
@@ -262,16 +262,6 @@ protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
- @Override
- protected String getDual() {
- return "rdb$database";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual();
- }
-
private boolean supportsOffsetFetchClause() {
return getDialect().getVersion().isSameOrAfter( 3 );
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/H2LegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/H2LegacyDialect.java
index 133e12225afc..17764e545966 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/H2LegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/H2LegacyDialect.java
@@ -48,6 +48,7 @@
import org.hibernate.internal.util.JdbcExceptionHelper;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
+import org.hibernate.query.sqm.CastType;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.dialect.NullOrdering;
@@ -514,6 +515,16 @@ public String extractPattern(TemporalUnit unit) {
: super.extractPattern(unit);
}
+ @Override
+ public String castPattern(CastType from, CastType to) {
+ if ( from == CastType.STRING && to == CastType.BOOLEAN ) {
+ return "cast(?1 as ?2)";
+ }
+ else {
+ return super.castPattern( from, to );
+ }
+ }
+
@Override
public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType, IntervalType intervalType) {
if ( intervalType != null ) {
@@ -992,4 +1003,14 @@ public String getCaseInsensitiveLike() {
public boolean supportsCaseInsensitiveLike() {
return getVersion().isSameOrAfter( 1, 4, 194 );
}
+
+ @Override
+ public boolean supportsValuesList() {
+ return true;
+ }
+
+ @Override
+ public String getDual() {
+ return "dual";
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/H2LegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/H2LegacySqlAstTranslator.java
index 7213cd169079..b081591b6452 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/H2LegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/H2LegacySqlAstTranslator.java
@@ -340,13 +340,9 @@ protected boolean renderPrimaryTableReference(TableGroup tableGroup, LockMode lo
final TableReference tableRef = tableGroup.getPrimaryTableReference();
// The H2 parser can't handle a sub-query as first element in a nested join
// i.e. `join ( (select ...) alias join ... )`, so we have to introduce a dummy table reference
- if ( tableRef instanceof QueryPartTableReference || tableRef.getTableId().startsWith( "(select" ) ) {
- final boolean realTableGroup = tableGroup.isRealTableGroup()
- && ( CollectionHelper.isNotEmpty( tableGroup.getTableReferenceJoins() )
- || hasNestedTableGroupsToRender( tableGroup.getNestedTableGroupJoins() ) );
- if ( realTableGroup ) {
- appendSql( "dual cross join " );
- }
+ if ( getSqlBuffer().charAt( getSqlBuffer().length() - 1 ) == '('
+ && ( tableRef instanceof QueryPartTableReference || tableRef.getTableId().startsWith( "(select" ) ) ) {
+ appendSql( "dual cross join " );
}
return super.renderPrimaryTableReference( tableGroup, lockMode );
}
@@ -392,11 +388,6 @@ protected boolean supportsNullPrecedence() {
return getClauseStack().getCurrent() != Clause.WITHIN_GROUP || getDialect().getVersion().isSameOrAfter( 2 );
}
- @Override
- protected String getDual() {
- return "dual";
- }
-
private boolean supportsOffsetFetchClause() {
return getDialect().getVersion().isSameOrAfter( 1, 4, 195 );
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HSQLLegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HSQLLegacyDialect.java
index b491f19a8592..5460ff680c8f 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HSQLLegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HSQLLegacyDialect.java
@@ -315,25 +315,33 @@ public String castPattern(CastType from, CastType to) {
}
break;
case BOOLEAN:
- result = BooleanDecoder.toBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "true", "false" )
+ : BooleanDecoder.toBoolean( from );
if ( result != null ) {
return result;
}
break;
case INTEGER_BOOLEAN:
- result = BooleanDecoder.toIntegerBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "1", "0" )
+ : BooleanDecoder.toIntegerBoolean( from );
if ( result != null ) {
return result;
}
break;
case YN_BOOLEAN:
- result = BooleanDecoder.toYesNoBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "'Y'", "'N'" )
+ : BooleanDecoder.toYesNoBoolean( from );
if ( result != null ) {
return result;
}
break;
case TF_BOOLEAN:
- result = BooleanDecoder.toTrueFalseBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "'T'", "'F'" )
+ : BooleanDecoder.toTrueFalseBoolean( from );
if ( result != null ) {
return result;
}
@@ -825,6 +833,11 @@ public boolean requiresFloatCastingOfIntegerDivision() {
return true;
}
+ @Override
+ public boolean supportsValuesList() {
+ return true;
+ }
+
@Override
public IdentityColumnSupport getIdentityColumnSupport() {
return identityColumnSupport;
@@ -900,4 +913,9 @@ public UniqueDelegate getUniqueDelegate() {
public DmlTargetColumnQualifierSupport getDmlTargetColumnQualifierSupport() {
return DmlTargetColumnQualifierSupport.TABLE_ALIAS;
}
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual();
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HSQLLegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HSQLLegacySqlAstTranslator.java
index 8f7e958ee22b..e40a779eeda9 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HSQLLegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HSQLLegacySqlAstTranslator.java
@@ -12,20 +12,14 @@
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.IllegalQueryOperationException;
-import org.hibernate.query.sqm.BinaryArithmeticOperator;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
-import org.hibernate.sql.ast.tree.MutationStatement;
import org.hibernate.sql.ast.tree.Statement;
-import org.hibernate.sql.ast.tree.cte.CteStatement;
-import org.hibernate.sql.ast.tree.delete.DeleteStatement;
-import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
-import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
@@ -147,8 +141,7 @@ protected void visitRecursivePath(Expression recursivePath, int sizeEstimate) {
protected void visitAnsiCaseSearchedExpression(
CaseSearchedExpression expression,
Consumer resultRenderer) {
- if ( getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT && areAllResultsParameters( expression )
- || areAllResultsPlainParametersOrLiterals( expression ) ) {
+ if ( areAllResultsPlainParametersOrStringLiterals( expression ) ) {
final List whenFragments = expression.getWhenFragments();
final Expression firstResult = whenFragments.get( 0 ).getResult();
super.visitAnsiCaseSearchedExpression(
@@ -172,8 +165,7 @@ protected void visitAnsiCaseSearchedExpression(
protected void visitAnsiCaseSimpleExpression(
CaseSimpleExpression expression,
Consumer resultRenderer) {
- if ( getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT && areAllResultsParameters( expression )
- || areAllResultsPlainParametersOrLiterals( expression ) ) {
+ if ( areAllResultsPlainParametersOrStringLiterals( expression ) ) {
final List whenFragments = expression.getWhenFragments();
final Expression firstResult = whenFragments.get( 0 ).getResult();
super.visitAnsiCaseSimpleExpression(
@@ -193,11 +185,11 @@ protected void visitAnsiCaseSimpleExpression(
}
}
- protected boolean areAllResultsPlainParametersOrLiterals(CaseSearchedExpression caseSearchedExpression) {
+ protected boolean areAllResultsPlainParametersOrStringLiterals(CaseSearchedExpression caseSearchedExpression) {
final List whenFragments = caseSearchedExpression.getWhenFragments();
final Expression firstResult = whenFragments.get( 0 ).getResult();
if ( isParameter( firstResult ) && getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT
- || isLiteral( firstResult ) ) {
+ || isStringLiteral( firstResult ) ) {
for ( int i = 1; i < whenFragments.size(); i++ ) {
final Expression result = whenFragments.get( i ).getResult();
if ( isParameter( result ) ) {
@@ -205,7 +197,7 @@ protected boolean areAllResultsPlainParametersOrLiterals(CaseSearchedExpression
return false;
}
}
- else if ( !isLiteral( result ) ) {
+ else if ( !isStringLiteral( result ) ) {
return false;
}
}
@@ -214,11 +206,11 @@ else if ( !isLiteral( result ) ) {
return false;
}
- protected boolean areAllResultsPlainParametersOrLiterals(CaseSimpleExpression caseSimpleExpression) {
+ protected boolean areAllResultsPlainParametersOrStringLiterals(CaseSimpleExpression caseSimpleExpression) {
final List whenFragments = caseSimpleExpression.getWhenFragments();
final Expression firstResult = whenFragments.get( 0 ).getResult();
if ( isParameter( firstResult ) && getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT
- || isLiteral( firstResult ) ) {
+ || isStringLiteral( firstResult ) ) {
for ( int i = 1; i < whenFragments.size(); i++ ) {
final Expression result = whenFragments.get( i ).getResult();
if ( isParameter( result ) ) {
@@ -226,7 +218,7 @@ protected boolean areAllResultsPlainParametersOrLiterals(CaseSimpleExpression ca
return false;
}
}
- else if ( !isLiteral( result ) ) {
+ else if ( !isStringLiteral( result ) ) {
return false;
}
}
@@ -235,6 +227,13 @@ else if ( !isLiteral( result ) ) {
return false;
}
+ private boolean isStringLiteral( Expression expression ) {
+ if ( expression instanceof Literal ) {
+ return ( (Literal) expression ).getJdbcMapping().getJdbcType().isStringLike();
+ }
+ return false;
+ }
+
@Override
public boolean supportsFilterClause() {
return true;
@@ -329,22 +328,20 @@ else if ( expression instanceof Summarization ) {
@Override
protected boolean supportsRowValueConstructorSyntax() {
- return false;
- }
-
- @Override
- protected boolean supportsRowValueConstructorSyntaxInInList() {
+ // It's supported but not usable due to a bug: https://sourceforge.net/p/hsqldb/bugs/1714/
return false;
}
@Override
protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
+ // It's supported but not usable due to a bug: https://sourceforge.net/p/hsqldb/bugs/1714/
return false;
}
@Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual();
+ protected boolean supportsRowValueConstructorSyntaxInInList() {
+ // It's supported but not usable due to a bug: https://sourceforge.net/p/hsqldb/bugs/1714/
+ return false;
}
private boolean supportsOffsetFetchClause() {
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/InformixDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/InformixDialect.java
index b269c9206b36..95e4983dc398 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/InformixDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/InformixDialect.java
@@ -7,6 +7,9 @@
package org.hibernate.community.dialect;
import java.sql.Types;
+import java.time.temporal.TemporalAccessor;
+import java.util.Date;
+import java.util.TimeZone;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.FunctionContributions;
@@ -23,6 +26,7 @@
import org.hibernate.dialect.NullOrdering;
import org.hibernate.dialect.Replacer;
import org.hibernate.dialect.SelectItemReferenceStrategy;
+import org.hibernate.dialect.VarcharUUIDJdbcType;
import org.hibernate.dialect.function.CaseLeastGreatestEmulation;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.identity.IdentityColumnSupport;
@@ -33,6 +37,7 @@
import org.hibernate.dialect.unique.UniqueDelegate;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
+import org.hibernate.engine.jdbc.env.spi.NameQualifierSupport;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtractor;
@@ -51,11 +56,13 @@
import org.hibernate.query.sqm.mutation.internal.temptable.LocalTemporaryTableMutationStrategy;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableInsertStrategy;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
+import org.hibernate.query.sqm.produce.function.StandardFunctionArgumentTypeResolvers;
import org.hibernate.query.sqm.sql.SqmTranslator;
import org.hibernate.query.sqm.sql.SqmTranslatorFactory;
import org.hibernate.query.sqm.sql.StandardSqmTranslatorFactory;
import org.hibernate.query.sqm.tree.select.SqmSelectStatement;
import org.hibernate.service.ServiceRegistry;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
import org.hibernate.sql.ast.spi.SqlAppender;
@@ -67,14 +74,21 @@
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.tool.schema.internal.StandardForeignKeyExporter;
import org.hibernate.tool.schema.spi.Exporter;
+import org.hibernate.type.JavaObjectType;
+import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
+import org.hibernate.type.descriptor.jdbc.ObjectNullAsBinaryTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.descriptor.sql.internal.CapacityDependentDdlType;
+import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
+import jakarta.persistence.TemporalType;
+
import static org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtractor.extractUsingTemplate;
+import static org.hibernate.query.sqm.produce.function.FunctionParameterType.STRING;
import static org.hibernate.type.SqlTypes.BIGINT;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.FLOAT;
@@ -86,8 +100,17 @@
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TINYINT;
+import static org.hibernate.type.SqlTypes.UUID;
import static org.hibernate.type.SqlTypes.VARBINARY;
import static org.hibernate.type.SqlTypes.VARCHAR;
+import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_END;
+import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_START_DATE;
+import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_START_TIME;
+import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_START_TIMESTAMP;
+import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
+import static org.hibernate.type.descriptor.DateTimeUtils.appendAsLocalTime;
+import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
+import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
/**
* Dialect for Informix 7.31.UD3 with Informix
@@ -203,6 +226,7 @@ protected void registerColumnTypes(TypeContributions typeContributions, ServiceR
.withTypeCapacity( getMaxNVarcharLength(), columnType( NVARCHAR ) )
.build()
);
+ ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "char(36)", this ) );
}
@Override
@@ -234,6 +258,11 @@ public int getDefaultTimestampPrecision() {
return 5;
}
+ @Override
+ public boolean doesRoundTemporalOnOverflow() {
+ return false;
+ }
+
@Override
public int getFloatPrecision() {
return 8;
@@ -254,10 +283,10 @@ public void initializeFunctionRegistry(FunctionContributions functionContributio
super.initializeFunctionRegistry(functionContributions);
CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
+ functionFactory.aggregates( this, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionFactory.instr();
functionFactory.substr();
- functionFactory.substring_substr();
- //also natively supports ANSI-style substring()
+ functionFactory.substringFromFor();
functionFactory.trunc();
functionFactory.trim2();
functionFactory.space();
@@ -280,12 +309,30 @@ public void initializeFunctionRegistry(FunctionContributions functionContributio
functionFactory.monthsBetween();
functionFactory.stddev();
functionFactory.variance();
- functionFactory.locate_positionSubstring();
+ functionFactory.bitLength_pattern( "length(?1)*8" );
+
+ if ( getVersion().isSameOrAfter( 12 ) ) {
+ functionFactory.locate_charindex();
+ }
//coalesce() and nullif() both supported since Informix 12
functionContributions.getFunctionRegistry().register( "least", new CaseLeastGreatestEmulation( true ) );
functionContributions.getFunctionRegistry().register( "greatest", new CaseLeastGreatestEmulation( false ) );
+ functionContributions.getFunctionRegistry().namedDescriptorBuilder( "matches" )
+ .setInvariantType( functionContributions.getTypeConfiguration()
+ .getBasicTypeRegistry()
+ .resolve( StandardBasicTypes.STRING )
+ )
+ .setExactArgumentCount( 2 )
+ .setArgumentTypeResolver(
+ StandardFunctionArgumentTypeResolvers.impliedOrInvariant(
+ functionContributions.getTypeConfiguration(),
+ STRING
+ )
+ )
+ .setArgumentListSignature( "(STRING string, STRING pattern)" )
+ .register();
if ( supportsWindowFunctions() ) {
functionFactory.windowFunctions();
}
@@ -620,6 +667,11 @@ public String[] getDropSchemaCommand(String schemaName) {
return new String[] { "" };
}
+ @Override
+ public NameQualifierSupport getNameQualifierSupport() {
+ return NameQualifierSupport.BOTH;
+ }
+
@Override
public boolean useCrossReferenceForeignKeys(){
return true;
@@ -650,6 +702,11 @@ public String currentDate() {
return "today";
}
+ @Override
+ public String currentTime() {
+ return currentTimestamp();
+ }
+
@Override
public String currentTimestamp() {
return "current";
@@ -713,6 +770,56 @@ public static Replacer datetimeFormat(String format) {
.replace("S", "%F1");
}
+ @Override
+ public void appendDateTimeLiteral(
+ SqlAppender appender,
+ TemporalAccessor temporalAccessor,
+ TemporalType precision,
+ TimeZone jdbcTimeZone) {
+ switch ( precision ) {
+ case DATE:
+ appender.appendSql( JDBC_ESCAPE_START_DATE );
+ appendAsDate( appender, temporalAccessor );
+ appender.appendSql( JDBC_ESCAPE_END );
+ break;
+ case TIME:
+ appender.appendSql( JDBC_ESCAPE_START_TIME );
+ appendAsTime( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
+ appender.appendSql( JDBC_ESCAPE_END );
+ break;
+ case TIMESTAMP:
+ appender.appendSql( JDBC_ESCAPE_START_TIMESTAMP );
+ appendAsTimestampWithMicros( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
+ appender.appendSql( JDBC_ESCAPE_END );
+ break;
+ default:
+ throw new IllegalArgumentException();
+ }
+ }
+
+ @Override
+ public void appendDateTimeLiteral(SqlAppender appender, Date date, TemporalType precision, TimeZone jdbcTimeZone) {
+ switch ( precision ) {
+ case DATE:
+ appender.appendSql( JDBC_ESCAPE_START_DATE );
+ appendAsDate( appender, date );
+ appender.appendSql( JDBC_ESCAPE_END );
+ break;
+ case TIME:
+ appender.appendSql( JDBC_ESCAPE_START_TIME );
+ appendAsLocalTime( appender, date );
+ appender.appendSql( JDBC_ESCAPE_END );
+ break;
+ case TIMESTAMP:
+ appender.appendSql( JDBC_ESCAPE_START_TIMESTAMP );
+ appendAsTimestampWithMicros( appender, date, jdbcTimeZone );
+ appender.appendSql( JDBC_ESCAPE_END );
+ break;
+ default:
+ throw new IllegalArgumentException();
+ }
+ }
+
@Override
public String getSelectClauseNullString(int sqlType, TypeConfiguration typeConfiguration) {
DdlType descriptor = typeConfiguration.getDdlTypeRegistry().getDescriptor( sqlType );
@@ -743,5 +850,27 @@ public void contributeTypes(TypeContributions typeContributions, ServiceRegistry
super.contributeTypes( typeContributions, serviceRegistry );
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
jdbcTypeRegistry.addDescriptor( Types.NCLOB, ClobJdbcType.DEFAULT );
+ typeContributions.contributeJdbcType( VarcharUUIDJdbcType.INSTANCE );
+ typeContributions.contributeJdbcType( ObjectNullAsBinaryTypeJdbcType.INSTANCE );
+
+ // Until we remove StandardBasicTypes, we have to keep this
+ typeContributions.contributeType(
+ new JavaObjectType(
+ ObjectNullAsBinaryTypeJdbcType.INSTANCE,
+ typeContributions.getTypeConfiguration()
+ .getJavaTypeRegistry()
+ .getDescriptor( Object.class )
+ )
+ );
+ }
+
+ @Override
+ public String getDual() {
+ return "(select 0 from systables where tabid=1)";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual() + " dual";
}
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/InformixSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/InformixSqlAstTranslator.java
index 22528b21d065..eb451d4369e3 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/InformixSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/InformixSqlAstTranslator.java
@@ -153,16 +153,6 @@ protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
- @Override
- protected String getDual() {
- return "(select 0 from systables where tabid=1)";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual() + " dual";
- }
-
@Override
protected void renderNull(Literal literal) {
if ( getParameterRenderingMode() == SqlAstNodeRenderingMode.NO_UNTYPED ) {
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/IngresDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/IngresDialect.java
index 5f62bfd28138..c2f5073404d0 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/IngresDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/IngresDialect.java
@@ -559,4 +559,15 @@ public String translateExtractField(TemporalUnit unit) {
public boolean supportsFetchClause(FetchClauseType type) {
return getVersion().isSameOrAfter( 9, 3 );
}
+
+ @Override
+ public String getDual() {
+ return "(select 0)";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ //this is only necessary if the query has a where clause
+ return " from " + getDual() + " dual";
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/IngresSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/IngresSqlAstTranslator.java
index 7a4af55cc9a6..be2c420c79e9 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/IngresSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/IngresSqlAstTranslator.java
@@ -138,17 +138,6 @@ protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
- @Override
- protected String getDual() {
- return "(select 0)";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- //this is only necessary if the query has a where clause
- return " from " + getDual() + " dual";
- }
-
@Override
protected boolean needsRowsToSkip() {
return !supportsOffsetFetchClause();
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MariaDBLegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MariaDBLegacyDialect.java
index d56e791050cb..402b34ee9a03 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MariaDBLegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MariaDBLegacyDialect.java
@@ -264,4 +264,14 @@ public IdentifierHelper buildIdentifierHelper(IdentifierHelperBuilder builder, D
return super.buildIdentifierHelper( builder, dbMetaData );
}
+
+ @Override
+ public String getDual() {
+ return "dual";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return getVersion().isBefore( 10, 4 ) ? ( " from " + getDual() ) : "";
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MariaDBLegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MariaDBLegacySqlAstTranslator.java
index 01cf6eb82e42..cfa1de6ec7ac 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MariaDBLegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MariaDBLegacySqlAstTranslator.java
@@ -366,16 +366,6 @@ protected boolean supportsDistinctFromPredicate() {
return true;
}
- @Override
- protected String getDual() {
- return "dual";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return getDialect().getVersion().isBefore( 10, 4 ) ? ( " from " + getDual() ) : "";
- }
-
@Override
public MariaDBLegacyDialect getDialect() {
return this.dialect;
@@ -404,4 +394,13 @@ protected void renderStringContainsExactlyPredicate(Expression haystack, Express
needle.accept( this );
appendSql( ",'~','~~'),'?','~?'),'%','~%'),'%') escape '~'" );
}
+
+ @Override
+ protected void appendAssignmentColumn(ColumnReference column) {
+ column.appendColumnForWrite(
+ this,
+ getAffectedTableNames().size() > 1 && !(getStatement() instanceof InsertSelectStatement)
+ ? determineColumnReferenceQualifier( column )
+ : null );
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MaxDBDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MaxDBDialect.java
index e527bd5baafa..f179bed01780 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MaxDBDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MaxDBDialect.java
@@ -331,5 +331,15 @@ public String getTemporaryTableCreateOptions() {
public boolean supportsJdbcConnectionLobCreation(DatabaseMetaData databaseMetaData) {
return false;
}
+
+ @Override
+ public String getDual() {
+ return "dual";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual();
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MaxDBSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MaxDBSqlAstTranslator.java
index 011098389488..3955edfddcf3 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MaxDBSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MaxDBSqlAstTranslator.java
@@ -92,14 +92,4 @@ protected boolean supportsRowValueConstructorSyntaxInInList() {
protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
-
- @Override
- protected String getDual() {
- return "dual";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual();
- }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MimerSQLDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MimerSQLDialect.java
index 3da7ba4005a5..198ffa412d79 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MimerSQLDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MimerSQLDialect.java
@@ -344,4 +344,9 @@ public boolean useConnectionToCreateLob() {
public IdentityColumnSupport getIdentityColumnSupport() {
return MimerSQLIdentityColumnSupport.INSTANCE;
}
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual();
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MimerSQLSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MimerSQLSqlAstTranslator.java
index 7510df6451ba..16d79ec7ffbb 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MimerSQLSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MimerSQLSqlAstTranslator.java
@@ -81,9 +81,4 @@ protected boolean supportsRowValueConstructorSyntaxInInList() {
protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual();
- }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MySQLLegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MySQLLegacyDialect.java
index 4739b447acd2..377d6f75cb62 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MySQLLegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MySQLLegacyDialect.java
@@ -604,10 +604,7 @@ public void initializeFunctionRegistry(FunctionContributions functionContributio
BasicTypeRegistry basicTypeRegistry = functionContributions.getTypeConfiguration().getBasicTypeRegistry();
SqmFunctionRegistry functionRegistry = functionContributions.getFunctionRegistry();
- functionRegistry.noArgsBuilder( "localtime" )
- .setInvariantType(basicTypeRegistry.resolve( StandardBasicTypes.TIMESTAMP ))
- .setUseParenthesesWhenNoArgs( false )
- .register();
+
// pi() produces a value with 7 digits unless we're explicit
if ( getMySQLVersion().isSameOrAfter( 8 ) ) {
functionRegistry.patternDescriptorBuilder( "pi", "cast(pi() as double)" )
@@ -1429,4 +1426,14 @@ public boolean supportsFromClauseInUpdate() {
return true;
}
+ @Override
+ public String getDual() {
+ return "dual";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return getVersion().isSameOrAfter( 8 ) ? "" : ( " from " + getDual() );
+ }
+
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MySQLLegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MySQLLegacySqlAstTranslator.java
index ec382f39b65d..3da05b0b441e 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MySQLLegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MySQLLegacySqlAstTranslator.java
@@ -389,16 +389,6 @@ protected boolean supportsWithClause() {
return getDialect().getVersion().isSameOrAfter( 8 );
}
- @Override
- protected String getDual() {
- return "dual";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return getDialect().getVersion().isSameOrAfter( 8 ) ? "" : ( " from " + getDual() );
- }
-
@Override
public MySQLLegacyDialect getDialect() {
return (MySQLLegacyDialect) DialectDelegateWrapper.extractRealDialect( super.getDialect() );
@@ -423,4 +413,13 @@ protected void renderStringContainsExactlyPredicate(Expression haystack, Express
needle.accept( this );
appendSql( ",'~','~~'),'?','~?'),'%','~%'),'%') escape '~'" );
}
+
+ @Override
+ protected void appendAssignmentColumn(ColumnReference column) {
+ column.appendColumnForWrite(
+ this,
+ getAffectedTableNames().size() > 1 && !(getStatement() instanceof InsertSelectStatement)
+ ? determineColumnReferenceQualifier( column )
+ : null );
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/OracleLegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/OracleLegacyDialect.java
index 14d0037cb2ae..ccb9206ea6a8 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/OracleLegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/OracleLegacyDialect.java
@@ -13,6 +13,7 @@
import java.sql.Types;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
+import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import java.util.regex.Matcher;
@@ -27,6 +28,7 @@
import org.hibernate.dialect.DmlTargetColumnQualifierSupport;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
+import org.hibernate.dialect.OracleServerConfiguration;
import org.hibernate.dialect.OracleBooleanJdbcType;
import org.hibernate.dialect.OracleJdbcHelper;
import org.hibernate.dialect.OracleJsonJdbcType;
@@ -115,8 +117,10 @@
import jakarta.persistence.TemporalType;
+import static java.lang.String.join;
import static java.util.regex.Pattern.CASE_INSENSITIVE;
import static org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtractor.extractUsingTemplate;
+import static org.hibernate.internal.util.StringHelper.isEmpty;
import static org.hibernate.query.sqm.TemporalUnit.DAY;
import static org.hibernate.query.sqm.TemporalUnit.HOUR;
import static org.hibernate.query.sqm.TemporalUnit.MINUTE;
@@ -185,16 +189,54 @@ public class OracleLegacyDialect extends Dialect {
private final UniqueDelegate uniqueDelegate = new CreateTableUniqueDelegate(this);
private final SequenceSupport oracleSequenceSupport = OracleSequenceSupport.getInstance(this);
+ // Is it an Autonomous Database Cloud Service?
+ protected final boolean autonomous;
+
+ // Is MAX_STRING_SIZE set to EXTENDED?
+ protected final boolean extended;
+
+ // Is the database accessed using a database service protected by Application Continuity.
+ protected final boolean applicationContinuity;
+
+ protected final int driverMajorVersion;
+ protected final int driverMinorVersion;
+
public OracleLegacyDialect() {
this( DatabaseVersion.make( 8, 0 ) );
}
public OracleLegacyDialect(DatabaseVersion version) {
- super(version);
+ super( version );
+ autonomous = false;
+ extended = false;
+ applicationContinuity = false;
+ driverMajorVersion = 19;
+ driverMinorVersion = 0;
}
public OracleLegacyDialect(DialectResolutionInfo info) {
- super(info);
+ this( info, OracleServerConfiguration.fromDialectResolutionInfo( info ) );
+ }
+
+ public OracleLegacyDialect(DialectResolutionInfo info, OracleServerConfiguration serverConfiguration) {
+ super( info );
+ autonomous = serverConfiguration.isAutonomous();
+ extended = serverConfiguration.isExtended();
+ applicationContinuity = serverConfiguration.isApplicationContinuity();
+ this.driverMinorVersion = serverConfiguration.getDriverMinorVersion();
+ this.driverMajorVersion = serverConfiguration.getDriverMajorVersion();
+ }
+
+ public boolean isAutonomous() {
+ return autonomous;
+ }
+
+ public boolean isExtended() {
+ return extended;
+ }
+
+ public boolean isApplicationContinuity() {
+ return applicationContinuity;
}
@Override
@@ -398,20 +440,33 @@ public String castPattern(CastType from, CastType to) {
}
break;
case INTEGER_BOOLEAN:
- result = BooleanDecoder.toIntegerBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "1", "0" )
+ : BooleanDecoder.toIntegerBoolean( from );
if ( result != null ) {
return result;
}
break;
case YN_BOOLEAN:
- result = BooleanDecoder.toYesNoBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "'Y'", "'N'" )
+ : BooleanDecoder.toYesNoBoolean( from );
if ( result != null ) {
return result;
}
break;
case BOOLEAN:
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "true", "false" )
+ : BooleanDecoder.toBoolean( from );
+ if ( result != null ) {
+ return result;
+ }
+ break;
case TF_BOOLEAN:
- result = BooleanDecoder.toTrueFalseBoolean( from );
+ result = from == CastType.STRING
+ ? buildStringToBooleanCastDecode( "'T'", "'F'" )
+ : BooleanDecoder.toTrueFalseBoolean( from );
if ( result != null ) {
return result;
}
@@ -1203,6 +1258,17 @@ public boolean useFollowOnLocking(String sql, QueryOptions queryOptions) {
);
}
+ @Override
+ public String getQueryHintString(String query, List hintList) {
+ if ( hintList.isEmpty() ) {
+ return query;
+ }
+ else {
+ final String hints = join( " ", hintList );
+ return isEmpty( hints ) ? query : getQueryHintString( query, hints );
+ }
+ }
+
@Override
public String getQueryHintString(String sql, String hints) {
final String statementType = statementType( sql );
@@ -1560,8 +1626,18 @@ public boolean supportsFromClauseInUpdate() {
@Override
public boolean useInputStreamToInsertBlob() {
- // see HHH-18206
- return false;
+ // If application continuity is enabled, don't use stream bindings, since a replay could otherwise fail
+ // if the underlying stream doesn't support mark and reset
+ return !isApplicationContinuity();
+ }
+ @Override
+ public String getDual() {
+ return "dual";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual();
}
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/OracleLegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/OracleLegacySqlAstTranslator.java
index 1b8af5cc5d6c..15f5f489c427 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/OracleLegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/OracleLegacySqlAstTranslator.java
@@ -674,16 +674,6 @@ protected boolean supportsRowValueConstructorSyntaxInInSubQuery() {
return getDialect().getVersion().isSameOrAfter( 9 );
}
- @Override
- protected String getDual() {
- return "dual";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual();
- }
-
private boolean supportsOffsetFetchClause() {
return getDialect().supportsFetchClause( FetchClauseType.ROWS_ONLY );
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/PostgreSQLLegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/PostgreSQLLegacyDialect.java
index 219079710bcf..27f526d1c1e9 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/PostgreSQLLegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/PostgreSQLLegacyDialect.java
@@ -55,11 +55,14 @@
import org.hibernate.exception.spi.ViolatedConstraintNameExtractor;
import org.hibernate.internal.util.JdbcExceptionHelper;
import org.hibernate.metamodel.mapping.EntityMappingType;
+import org.hibernate.metamodel.mapping.SqlExpressible;
+import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.procedure.internal.PostgreSQLCallableStatementSupport;
import org.hibernate.procedure.spi.CallableStatementSupport;
import org.hibernate.query.SemanticException;
import org.hibernate.query.spi.QueryOptions;
+import org.hibernate.query.sqm.CastType;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.TemporalUnit;
@@ -216,6 +219,7 @@ protected String castType(int sqlTypeCode) {
case NCHAR:
case VARCHAR:
case NVARCHAR:
+ return "varchar";
case LONG32VARCHAR:
case LONG32NVARCHAR:
return "text";
@@ -418,6 +422,16 @@ public String extractPattern(TemporalUnit unit) {
}
}
+ @Override
+ public String castPattern(CastType from, CastType to) {
+ if ( from == CastType.STRING && to == CastType.BOOLEAN ) {
+ return "cast(?1 as ?2)";
+ }
+ else {
+ return super.castPattern( from, to );
+ }
+ }
+
/**
* {@code microsecond} is the smallest unit for an {@code interval},
* and the highest precision for a {@code timestamp}, so we could
@@ -847,6 +861,8 @@ public boolean supportsOuterJoinForUpdate() {
@Override
public boolean useInputStreamToInsertBlob() {
+ // PG-JDBC treats setBinaryStream()/setCharacterStream() calls like bytea/varchar, which are not LOBs,
+ // so disable stream bindings for this dialect completely
return false;
}
@@ -861,6 +877,14 @@ public String getSelectClauseNullString(int sqlType, TypeConfiguration typeConfi
return "cast(null as " + typeConfiguration.getDdlTypeRegistry().getDescriptor( sqlType ).getRawTypeName() + ")";
}
+ @Override
+ public String getSelectClauseNullString(SqlTypedMapping sqlType, TypeConfiguration typeConfiguration) {
+ final String castTypeName = typeConfiguration.getDdlTypeRegistry()
+ .getDescriptor( sqlType.getJdbcMapping().getJdbcType().getDdlTypeCode() )
+ .getCastTypeName( sqlType.toSize(), (SqlExpressible) sqlType.getJdbcMapping(), typeConfiguration.getDdlTypeRegistry() );
+ return "cast(null as " + castTypeName + ")";
+ }
+
@Override
public boolean supportsCommentOn() {
return true;
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/PostgreSQLLegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/PostgreSQLLegacySqlAstTranslator.java
index 4dae9260dac3..2a96aa8d8c55 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/PostgreSQLLegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/PostgreSQLLegacySqlAstTranslator.java
@@ -24,6 +24,7 @@
import org.hibernate.sql.ast.tree.insert.ConflictClause;
import org.hibernate.sql.ast.tree.insert.InsertSelectStatement;
import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
+import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.predicate.LikePredicate;
import org.hibernate.sql.ast.tree.predicate.NullnessPredicate;
import org.hibernate.sql.ast.tree.select.QueryGroup;
@@ -46,6 +47,14 @@ public class PostgreSQLLegacySqlAstTranslator extends A
public PostgreSQLLegacySqlAstTranslator(SessionFactoryImplementor sessionFactory, Statement statement) {
super( sessionFactory, statement );
}
+
+ @Override
+ public void visitInArrayPredicate(InArrayPredicate inArrayPredicate) {
+ inArrayPredicate.getTestExpression().accept( this );
+ appendSql( " = any (" );
+ inArrayPredicate.getArrayParameter().accept( this );
+ appendSql( ")" );
+ }
@Override
protected void renderInsertIntoNoColumns(TableInsertStandard tableInsert) {
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/RDMSOS2200Dialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/RDMSOS2200Dialect.java
index b4b111aa52a1..1e5f5db6cfcf 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/RDMSOS2200Dialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/RDMSOS2200Dialect.java
@@ -434,4 +434,14 @@ public void appendDatetimeFormat(SqlAppender appender, String format) {
public String trimPattern(TrimSpec specification, boolean isWhitespace) {
return AbstractTransactSQLDialect.replaceLtrimRtrim( specification, isWhitespace );
}
+
+ @Override
+ public String getDual() {
+ return "rdms.rdms_dummy";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual() + " where key_col=1";
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/RDMSOS2200SqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/RDMSOS2200SqlAstTranslator.java
index 9a1f97109d78..24f726c877a1 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/RDMSOS2200SqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/RDMSOS2200SqlAstTranslator.java
@@ -124,14 +124,4 @@ protected boolean supportsRowValueConstructorSyntaxInInList() {
protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
-
- @Override
- protected String getDual() {
- return "rdms.rdms_dummy";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual() + " where key_col=1";
- }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SingleStoreDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SingleStoreDialect.java
index 8af3ae96b905..7c0347fa07ce 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SingleStoreDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SingleStoreDialect.java
@@ -1411,4 +1411,9 @@ public boolean isForUpdateLockingEnabled() {
* @settingDefault {@code false}
*/
public static final String SINGLE_STORE_FOR_UPDATE_LOCK_ENABLED = "hibernate.dialect.singlestore.for_update_lock_enabled";
+
+ @Override
+ public String getDual() {
+ return "dual";
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SingleStoreSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SingleStoreSqlAstTranslator.java
index c646a5500542..dac437dd4015 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SingleStoreSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SingleStoreSqlAstTranslator.java
@@ -437,11 +437,6 @@ protected boolean supportsDistinctFromPredicate() {
return false;
}
- @Override
- protected String getDual() {
- return "dual";
- }
-
@Override
public SingleStoreDialect getDialect() {
return this.dialect;
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseASELegacyDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseASELegacyDialect.java
index 230b6d4f1e6a..db66214344cd 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseASELegacyDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseASELegacyDialect.java
@@ -125,13 +125,8 @@ protected void registerColumnTypes(TypeContributions typeContributions, ServiceR
// But with jTDS we can't use them as the driver can't handle the types
if ( getVersion().isSameOrAfter( 15, 5 ) && getDriverKind() != SybaseDriverKind.JTDS ) {
ddlTypeRegistry.addDescriptor(
- CapacityDependentDdlType.builder( DATE, "bigdatetime", "bigdatetime", this )
- .withTypeCapacity( 3, "datetime" )
- .build()
- );
- ddlTypeRegistry.addDescriptor(
- CapacityDependentDdlType.builder( TIME, "bigdatetime", "bigdatetime", this )
- .withTypeCapacity( 3, "datetime" )
+ CapacityDependentDdlType.builder( TIME, "bigtime", "bigtime", this )
+ .withTypeCapacity( 3, "time" )
.build()
);
ddlTypeRegistry.addDescriptor(
@@ -696,4 +691,9 @@ public LimitHandler getLimitHandler() {
}
return new TopLimitHandler(false);
}
+
+ @Override
+ public String getDual() {
+ return "(select 1 c1)";
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseASELegacySqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseASELegacySqlAstTranslator.java
index b5ace7590994..18197f136559 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseASELegacySqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseASELegacySqlAstTranslator.java
@@ -530,11 +530,6 @@ protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
- @Override
- protected String getDual() {
- return "(select 1 c1)";
- }
-
private boolean supportsTopClause() {
return getDialect().getVersion().isSameOrAfter( 12, 5 );
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseAnywhereDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseAnywhereDialect.java
index e77527c4c9a6..0a5d8d137367 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseAnywhereDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseAnywhereDialect.java
@@ -215,4 +215,14 @@ public LimitHandler getLimitHandler() {
return TopLimitHandler.INSTANCE;
}
+ @Override
+ public String getDual() {
+ return "sys.dummy";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from " + getDual();
+ }
+
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseAnywhereSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseAnywhereSqlAstTranslator.java
index 841594f12e66..eb086e24fb5e 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseAnywhereSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SybaseAnywhereSqlAstTranslator.java
@@ -125,6 +125,7 @@ protected boolean renderNamedTableReference(NamedTableReference tableReference,
// Just always return true because SQL Server doesn't support the FOR UPDATE clause
return true;
}
+ super.renderNamedTableReference( tableReference, lockMode );
return false;
}
@@ -239,14 +240,4 @@ protected boolean supportsRowValueConstructorSyntaxInInList() {
protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
-
- @Override
- protected String getDual() {
- return "sys.dummy";
- }
-
- @Override
- protected String getFromDualForSelectOnly() {
- return " from " + getDual();
- }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/TimesTenDialect.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/TimesTenDialect.java
index aa30500db527..0a7299d5c06e 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/TimesTenDialect.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/TimesTenDialect.java
@@ -15,8 +15,11 @@
import org.hibernate.community.dialect.sequence.SequenceInformationExtractorTimesTenDatabaseImpl;
import org.hibernate.community.dialect.sequence.TimesTenSequenceSupport;
import org.hibernate.dialect.Dialect;
+import org.hibernate.dialect.BooleanDecoder;
import org.hibernate.dialect.RowLockStrategy;
import org.hibernate.dialect.function.CommonFunctionFactory;
+import org.hibernate.dialect.function.OracleTruncFunction;
+import org.hibernate.query.sqm.produce.function.StandardFunctionReturnTypeResolvers;
import org.hibernate.dialect.lock.LockingStrategy;
import org.hibernate.dialect.lock.OptimisticForceIncrementLockingStrategy;
import org.hibernate.dialect.lock.OptimisticLockingStrategy;
@@ -34,6 +37,7 @@
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.persister.entity.Lockable;
+import org.hibernate.query.sqm.CastType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.TemporalUnit;
import org.hibernate.query.sqm.mutation.internal.temptable.GlobalTemporaryTableInsertStrategy;
@@ -42,6 +46,7 @@
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.exec.spi.JdbcOperation;
@@ -52,6 +57,15 @@
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
+import org.hibernate.type.BasicType;
+import org.hibernate.type.BasicTypeRegistry;
+import org.hibernate.type.StandardBasicTypes;
+import org.hibernate.dialect.function.StandardSQLFunction;
+import org.hibernate.dialect.function.CurrentFunction;
+import org.hibernate.query.sqm.produce.function.StandardFunctionArgumentTypeResolvers;
+import jakarta.persistence.GenerationType;
+import java.util.Date;
+
import jakarta.persistence.TemporalType;
import static org.hibernate.dialect.SimpleDatabaseVersion.ZERO_VERSION;
@@ -59,14 +73,13 @@
import static org.hibernate.query.sqm.produce.function.FunctionParameterType.STRING;
/**
- * A SQL dialect for TimesTen 5.1.
+ * A SQL dialect for Oracle TimesTen
*
* Known limitations:
* joined-subclass support because of no CASE support in TimesTen
* No support for subqueries that includes aggregation
* - size() in HQL not supported
* - user queries that does subqueries with aggregation
- * No CLOB/BLOB support
* No cascade delete support.
* No Calendar support
* No support for updating primary keys.
@@ -90,6 +103,7 @@ protected String columnType(int sqlTypeCode) {
// for the default Oracle type mode
// TypeMode=0
case SqlTypes.BOOLEAN:
+ case SqlTypes.BIT:
case SqlTypes.TINYINT:
return "tt_tinyint";
case SqlTypes.SMALLINT:
@@ -101,15 +115,26 @@ protected String columnType(int sqlTypeCode) {
//note that 'binary_float'/'binary_double' might
//be better mappings for Java Float/Double
+ case SqlTypes.VARCHAR:
+ case SqlTypes.LONGVARCHAR:
+ return "varchar2($l)";
+
+ case SqlTypes.LONGVARBINARY:
+ return "varbinary($l)";
+
//'numeric'/'decimal' are synonyms for 'number'
case SqlTypes.NUMERIC:
case SqlTypes.DECIMAL:
return "number($p,$s)";
+ case SqlTypes.FLOAT:
+ return "binary_float";
+ case SqlTypes.DOUBLE:
+ return "binary_double";
+
case SqlTypes.DATE:
return "tt_date";
case SqlTypes.TIME:
return "tt_time";
- //`timestamp` has more precision than `tt_timestamp`
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
return "timestamp($p)";
@@ -157,22 +182,97 @@ public int getDefaultDecimalPrecision() {
public void initializeFunctionRegistry(FunctionContributions functionContributions) {
super.initializeFunctionRegistry(functionContributions);
- CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
+ final TypeConfiguration typeConfiguration = functionContributions.getTypeConfiguration();
+ CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
+ final BasicTypeRegistry basicTypeRegistry = typeConfiguration.getBasicTypeRegistry();
+ final BasicType timestampType = basicTypeRegistry.resolve( StandardBasicTypes.TIMESTAMP );
+ final BasicType stringType = basicTypeRegistry.resolve( StandardBasicTypes.STRING );
+ final BasicType longType = basicTypeRegistry.resolve( StandardBasicTypes.LONG );
+ final BasicType intType = basicTypeRegistry.resolve( StandardBasicTypes.INTEGER );
+
+ // String Functions
functionFactory.trim2();
- functionFactory.soundex();
- functionFactory.trunc();
+ functionFactory.characterLength_length( SqlAstNodeRenderingMode.DEFAULT );
+ functionFactory.concat_pipeOperator();
functionFactory.toCharNumberDateTimestamp();
- functionFactory.ceiling_ceil();
+ functionFactory.char_chr();
functionFactory.instr();
functionFactory.substr();
functionFactory.substring_substr();
- functionFactory.leftRight_substr();
- functionFactory.char_chr();
- functionFactory.rownumRowid();
- functionFactory.sysdate();
+ functionFactory.soundex();
+
+ // Date/Time Functions
+ functionContributions.getFunctionRegistry().register(
+ "sysdate", new CurrentFunction("sysdate", "sysdate", timestampType)
+ );
+ functionContributions.getFunctionRegistry().register(
+ "getdate", new CurrentFunction("getdate", "getdate()", timestampType )
+ );
+
+ // Multi-param date dialect functions
functionFactory.addMonths();
functionFactory.monthsBetween();
+ // Math functions
+ functionFactory.ceiling_ceil();
+ functionFactory.radians_acos();
+ functionFactory.degrees_acos();
+ functionFactory.sinh();
+ functionFactory.tanh();
+ functionContributions.getFunctionRegistry().register(
+ "trunc",
+ new OracleTruncFunction( functionContributions.getTypeConfiguration() )
+ );
+ functionContributions.getFunctionRegistry().registerAlternateKey( "truncate", "trunc" );
+ functionFactory.round();
+
+ // Bitwise functions
+ functionContributions.getFunctionRegistry()
+ .patternDescriptorBuilder( "bitor", "(?1+?2-bitand(?1,?2))")
+ .setExactArgumentCount( 2 )
+ .setArgumentTypeResolver( StandardFunctionArgumentTypeResolvers
+ .ARGUMENT_OR_IMPLIED_RESULT_TYPE )
+ .register();
+
+ functionContributions.getFunctionRegistry()
+ .patternDescriptorBuilder( "bitxor", "(?1+?2-2*bitand(?1,?2))")
+ .setExactArgumentCount( 2 )
+ .setArgumentTypeResolver( StandardFunctionArgumentTypeResolvers
+ .ARGUMENT_OR_IMPLIED_RESULT_TYPE )
+ .register();
+
+ // Misc. functions
+ functionContributions.getFunctionRegistry().namedDescriptorBuilder( "nvl" )
+ .setMinArgumentCount( 2 )
+ .setArgumentTypeResolver( StandardFunctionArgumentTypeResolvers.ARGUMENT_OR_IMPLIED_RESULT_TYPE )
+ .setReturnTypeResolver( StandardFunctionReturnTypeResolvers.useFirstNonNull() )
+ .register();
+
+ functionContributions.getFunctionRegistry().register(
+ "user", new CurrentFunction("user", "user", stringType)
+ );
+ functionContributions.getFunctionRegistry().register(
+ "rowid", new CurrentFunction("rowid", "rowid", stringType)
+ );
+ functionContributions.getFunctionRegistry().register(
+ "uid", new CurrentFunction("uid", "uid", intType)
+ );
+ functionContributions.getFunctionRegistry().register(
+ "rownum", new CurrentFunction("rownum", "rownum", longType)
+ );
+ functionContributions.getFunctionRegistry().register(
+ "vsize", new StandardSQLFunction("vsize", StandardBasicTypes.DOUBLE)
+ );
+ functionContributions.getFunctionRegistry().register(
+ "SESSION_USER", new CurrentFunction("SESSION_USER","SESSION_USER", stringType)
+ );
+ functionContributions.getFunctionRegistry().register(
+ "SYSTEM_USER", new CurrentFunction("SYSTEM_USER", "SYSTEM_USER", stringType)
+ );
+ functionContributions.getFunctionRegistry().register(
+ "CURRENT_USER", new CurrentFunction("CURRENT_USER","CURRENT_USER", stringType)
+ );
+
functionContributions.getFunctionRegistry().registerBinaryTernaryPattern(
"locate",
functionContributions.getTypeConfiguration().getBasicTypeRegistry().resolve( StandardBasicTypes.INTEGER ),
@@ -426,4 +526,104 @@ public String getSelectClauseNullString(int sqlType, TypeConfiguration typeConfi
}
}
+ @Override
+ public String getNativeIdentifierGeneratorStrategy() {
+ return "sequence";
+ }
+
+ @Override
+ public String currentDate() {
+ return "sysdate";
+ }
+
+ @Override
+ public String currentTime() {
+ return "sysdate";
+ }
+
+ @Override
+ public String currentTimestamp() {
+ return "sysdate";
+ }
+
+ @Override
+ public int getMaxVarcharLength() {
+ // 1 to 4,194,304 bytes according to TimesTen Doc
+ return 4194304;
+ }
+
+ @Override
+ public int getMaxVarbinaryLength() {
+ // 1 to 4,194,304 bytes according to TimesTen Doc
+ return 4194304;
+ }
+
+ @Override
+ public boolean isEmptyStringTreatedAsNull() {
+ return true;
+ }
+
+ @Override
+ public boolean supportsTupleDistinctCounts() {
+ return false;
+ }
+
+ @Override
+ public String getDual() {
+ return "dual";
+ }
+
+ @Override
+ public String getFromDualForSelectOnly() {
+ return " from dual";
+ }
+
+ @Override
+ public String castPattern(CastType from, CastType to) {
+ String result;
+ switch ( to ) {
+ case INTEGER:
+ case LONG:
+ result = BooleanDecoder.toInteger( from );
+ if ( result != null ) {
+ return result;
+ }
+ break;
+ case STRING:
+ switch ( from ) {
+ case BOOLEAN:
+ case INTEGER_BOOLEAN:
+ case TF_BOOLEAN:
+ case YN_BOOLEAN:
+ return BooleanDecoder.toString( from );
+ case DATE:
+ return "to_char(?1,'YYYY-MM-DD')";
+ case TIME:
+ return "to_char(?1,'HH24:MI:SS')";
+ case TIMESTAMP:
+ return "to_char(?1,'YYYY-MM-DD HH24:MI:SS.FF9')";
+ }
+ break;
+ case CLOB:
+ return "to_clob(?1)";
+ case DATE:
+ if ( from == CastType.STRING ) {
+ return "to_date(?1,'YYYY-MM-DD')";
+ }
+ break;
+ case TIME:
+ if ( from == CastType.STRING ) {
+ return "to_date(?1,'HH24:MI:SS')";
+ }
+ break;
+ case TIMESTAMP:
+ if ( from == CastType.STRING ) {
+ return "to_timestamp(?1,'YYYY-MM-DD HH24:MI:SS.FF9')";
+ }
+ break;
+ }
+ return super.castPattern(from, to);
+ }
+
+
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/TimesTenSqlAstTranslator.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/TimesTenSqlAstTranslator.java
index b0eadebbfa06..b4ff40f70993 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/TimesTenSqlAstTranslator.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/TimesTenSqlAstTranslator.java
@@ -28,6 +28,8 @@
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectClause;
import org.hibernate.sql.exec.spi.JdbcOperation;
+import org.hibernate.internal.util.collections.Stack;
+import org.hibernate.sql.ast.Clause;
/**
* A SQL AST translator for TimesTen.
@@ -143,4 +145,66 @@ protected boolean supportsRowValueConstructorSyntaxInInList() {
protected boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
+
+ protected void renderRowsToClause(QuerySpec querySpec) {
+ if ( querySpec.isRoot() && hasLimit() ) {
+ prepareLimitOffsetParameters();
+ renderRowsToClause( getOffsetParameter(), getLimitParameter() );
+ }
+ else {
+ assertRowsOnlyFetchClauseType( querySpec );
+ renderRowsToClause( querySpec.getOffsetClauseExpression(), querySpec.getFetchClauseExpression() );
+ }
+ }
+
+ protected void renderRowsToClause(Expression offsetClauseExpression, Expression fetchClauseExpression) {
+ // offsetClauseExpression -> firstRow
+ // fetchClauseExpression -> maxRows
+ final Stack clauseStack = getClauseStack();
+
+ if ( offsetClauseExpression == null && fetchClauseExpression != null ) {
+ // We only have a maxRows/limit. We use 'SELECT FIRST n' syntax
+ appendSql("first ");
+ clauseStack.push( Clause.FETCH );
+ try {
+ renderFetchExpression( fetchClauseExpression );
+ }
+ finally {
+ clauseStack.pop();
+ }
+ }
+ else if ( offsetClauseExpression != null ) {
+ // We have an offset. We use 'SELECT ROWS m TO n' syntax
+ appendSql( "rows " );
+
+ // Render offset parameter
+ clauseStack.push( Clause.OFFSET );
+ try {
+ renderOffsetExpression( offsetClauseExpression );
+ }
+ finally {
+ clauseStack.pop();
+ }
+
+ appendSql( " to " );
+
+ // Render maxRows/limit parameter
+ clauseStack.push( Clause.FETCH );
+ try {
+ if ( fetchClauseExpression != null ) {
+ // We need to subtract 1 row to fit maxRows
+ renderFetchPlusOffsetExpressionAsSingleParameter( fetchClauseExpression, offsetClauseExpression, -1 );
+ }
+ else {
+ // We don't have a maxRows param, we will just use a MAX_VALUE
+ appendSql( Integer.MAX_VALUE );
+ }
+ }
+ finally {
+ clauseStack.pop();
+ }
+ }
+
+ appendSql( WHITESPACE );
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/pagination/TimesTenLimitHandler.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/pagination/TimesTenLimitHandler.java
index 4d95ef2af0df..3dafe56b977c 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/pagination/TimesTenLimitHandler.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/pagination/TimesTenLimitHandler.java
@@ -6,23 +6,48 @@
*/
package org.hibernate.community.dialect.pagination;
+import org.hibernate.dialect.pagination.AbstractSimpleLimitHandler;
import org.hibernate.dialect.pagination.LimitHandler;
/**
* A {@link LimitHandler} for TimesTen, which uses {@code ROWS n},
* but at the start of the query instead of at the end.
*/
-public class TimesTenLimitHandler extends RowsLimitHandler {
+public class TimesTenLimitHandler extends AbstractSimpleLimitHandler {
public static final TimesTenLimitHandler INSTANCE = new TimesTenLimitHandler();
+ public TimesTenLimitHandler(){
+ }
+
+ @Override
+ public boolean supportsOffset() {
+ return false;
+ }
+
+ @Override
+ public boolean supportsLimitOffset() {
+ return true;
+ }
+
@Override
- protected String insert(String rows, String sql) {
- return insertAfterSelect( rows, sql );
+ // TimesTen is 1 based
+ public int convertToFirstRowValue(int zeroBasedFirstResult) {
+ return zeroBasedFirstResult + 1;
+ }
+
+ @Override
+ public boolean useMaxForLimit() {
+ return true;
}
@Override
public boolean bindLimitParametersFirst() {
return true;
}
+
+ @Override
+ protected String limitClause(boolean hasFirstRow) {
+ return hasFirstRow ? " rows ? to ?" : " first ?";
+ }
}
diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/sequence/TimesTenSequenceSupport.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/sequence/TimesTenSequenceSupport.java
index 802aa1b5801d..c0aa8308c519 100644
--- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/sequence/TimesTenSequenceSupport.java
+++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/sequence/TimesTenSequenceSupport.java
@@ -6,7 +6,6 @@
*/
package org.hibernate.community.dialect.sequence;
-import org.hibernate.dialect.sequence.NextvalSequenceSupport;
import org.hibernate.dialect.sequence.SequenceSupport;
/**
@@ -14,13 +13,37 @@
*
* @author Gavin King
*/
-public final class TimesTenSequenceSupport extends NextvalSequenceSupport {
+public final class TimesTenSequenceSupport implements SequenceSupport {
public static final SequenceSupport INSTANCE = new TimesTenSequenceSupport();
+ @Override
+ public boolean supportsPooledSequences() {
+ return true;
+ }
+
+ @Override
+ public String getSelectSequenceNextValString(String sequenceName) {
+ return sequenceName + ".nextval";
+ }
+
+ @Override
+ public String getSequenceNextValString(String sequenceName) {
+ return "select " + sequenceName + ".nextval from sys.dual";
+ }
+
@Override
public String getFromDual() {
return " from sys.dual";
}
+ @Override
+ public String getCreateSequenceString(String sequenceName) {
+ return "create sequence " + sequenceName;
+ }
+
+ @Override
+ public String getDropSequenceString(String sequenceName) {
+ return "drop sequence " + sequenceName;
+ }
}
diff --git a/hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/FetchPlusOffsetParameterTest.java b/hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/FetchPlusOffsetParameterTest.java
new file mode 100644
index 000000000000..786a84c20d99
--- /dev/null
+++ b/hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/FetchPlusOffsetParameterTest.java
@@ -0,0 +1,155 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later
+ * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
+ */
+package org.hibernate.community.dialect;
+
+import java.util.List;
+
+import org.hibernate.cfg.AvailableSettings;
+import org.hibernate.dialect.H2Dialect;
+import org.hibernate.dialect.H2SqlAstTranslator;
+import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.query.sqm.FetchClauseType;
+import org.hibernate.sql.ast.Clause;
+import org.hibernate.sql.ast.SqlAstTranslator;
+import org.hibernate.sql.ast.SqlAstTranslatorFactory;
+import org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory;
+import org.hibernate.sql.ast.tree.Statement;
+import org.hibernate.sql.ast.tree.expression.Expression;
+import org.hibernate.sql.ast.tree.select.QueryPart;
+import org.hibernate.sql.exec.spi.JdbcOperation;
+
+import org.hibernate.testing.orm.junit.DomainModel;
+import org.hibernate.testing.orm.junit.Jira;
+import org.hibernate.testing.orm.junit.RequiresDialect;
+import org.hibernate.testing.orm.junit.ServiceRegistry;
+import org.hibernate.testing.orm.junit.SessionFactory;
+import org.hibernate.testing.orm.junit.SessionFactoryScope;
+import org.hibernate.testing.orm.junit.SettingProvider;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+@RequiresDialect(H2Dialect.class)
+@DomainModel(annotatedClasses = FetchPlusOffsetParameterTest.Book.class)
+@SessionFactory
+@ServiceRegistry(
+ settingProviders = @SettingProvider(settingName = AvailableSettings.DIALECT, provider = FetchPlusOffsetParameterTest.TestSettingProvider.class)
+)
+@Jira("https://hibernate.atlassian.net/browse/HHH-19888")
+public class FetchPlusOffsetParameterTest {
+
+ @BeforeEach
+ protected void prepareTest(SessionFactoryScope scope) {
+ scope.inTransaction(
+ (session) -> {
+ for ( int i = 1; i <= 3; i++ ) {
+ session.persist( new Book( i, "Book " + i ) );
+ }
+ }
+ );
+ }
+
+ @Test
+ public void testStaticOffset(SessionFactoryScope scope) {
+ scope.inTransaction(
+ (session) -> {
+ final List books = session.createSelectionQuery(
+ "from Book b order by b.id",
+ Book.class
+ )
+ .setFirstResult( 2 )
+ .setMaxResults( 1 ).getResultList();
+ // The custom dialect will fetch offset + limit + staticOffset rows
+ // Since staticOffset is -1, it must yield 2 rows
+ assertEquals( 2, books.size() );
+ }
+ );
+ }
+
+ @Entity(name = "Book")
+ public static class Book {
+ @Id
+ private Integer id;
+ private String title;
+
+ public Book() {
+ }
+
+ public Book(Integer id, String title) {
+ this.id = id;
+ this.title = title;
+ }
+ }
+
+
+ public static class TestSettingProvider implements SettingProvider.Provider {
+
+ @Override
+ public String getSetting() {
+ return TestDialect.class.getName();
+ }
+ }
+
+ public static class TestDialect extends H2Dialect {
+
+ public TestDialect(DialectResolutionInfo info) {
+ super( info );
+ }
+
+ public TestDialect() {
+ }
+
+ @Override
+ public SqlAstTranslatorFactory getSqlAstTranslatorFactory() {
+ return new StandardSqlAstTranslatorFactory() {
+ @Override
+ protected SqlAstTranslator buildTranslator(
+ SessionFactoryImplementor sessionFactory, Statement statement) {
+ return new H2SqlAstTranslator<>( sessionFactory, statement ) {
+ @Override
+ public void visitOffsetFetchClause(QueryPart queryPart) {
+ final Expression offsetClauseExpression;
+ final Expression fetchClauseExpression;
+ if ( queryPart.isRoot() && hasLimit() ) {
+ prepareLimitOffsetParameters();
+ offsetClauseExpression = getOffsetParameter();
+ fetchClauseExpression = getLimitParameter();
+ }
+ else {
+ assert queryPart.getFetchClauseType() == FetchClauseType.ROWS_ONLY;
+ offsetClauseExpression = queryPart.getOffsetClauseExpression();
+ fetchClauseExpression = queryPart.getFetchClauseExpression();
+ }
+ if ( offsetClauseExpression != null && fetchClauseExpression != null ) {
+ appendSql( " fetch first " );
+ getClauseStack().push( Clause.FETCH );
+ try {
+ renderFetchPlusOffsetExpressionAsSingleParameter(
+ fetchClauseExpression,
+ offsetClauseExpression,
+ -1
+ );
+ }
+ finally {
+ getClauseStack().pop();
+ }
+ appendSql( " rows only" );
+ }
+ }
+ };
+ }
+ };
+ }
+ }
+}
+
+
diff --git a/hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/InformixFunctionTest.java b/hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/InformixFunctionTest.java
index 91070aaf6587..762bf92940bf 100644
--- a/hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/InformixFunctionTest.java
+++ b/hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/InformixFunctionTest.java
@@ -181,6 +181,23 @@ public void testCurrentTimestamp(SessionFactoryScope scope) {
);
}
+ @Test
+ @TestForIssue(jiraKey = "HHH-18369")
+ public void testMatches(SessionFactoryScope scope) {
+ scope.inTransaction(
+ (session) -> {
+ String country = (String) session.createQuery(
+ "select e.country " +
+ "from Event e " +
+ "where e.id = :id and matches(e.country, :country) = 'T'" )
+ .setParameter( "id", event.id )
+ .setParameter( "country", "R*" )
+ .getSingleResult();
+ assertEquals( "Romania", country );
+ }
+ );
+ }
+
private Calendar todayCalendar() {
Calendar calendar = Calendar.getInstance();
calendar.set(Calendar.HOUR_OF_DAY, 0);
diff --git a/hibernate-core/hibernate-core.gradle b/hibernate-core/hibernate-core.gradle
index 689fddb9b73b..8995db8eba19 100644
--- a/hibernate-core/hibernate-core.gradle
+++ b/hibernate-core/hibernate-core.gradle
@@ -45,6 +45,7 @@ dependencies {
compileOnly libs.jackson
compileOnly libs.jacksonXml
compileOnly dbLibs.postgresql
+ compileOnly dbLibs.edb
testImplementation project(':hibernate-testing')
testImplementation project(':hibernate-ant')
diff --git a/hibernate-core/src/main/antlr/org/hibernate/grammars/hql/HqlParser.g4 b/hibernate-core/src/main/antlr/org/hibernate/grammars/hql/HqlParser.g4
index b9a113092a4d..9b65eff78379 100644
--- a/hibernate-core/src/main/antlr/org/hibernate/grammars/hql/HqlParser.g4
+++ b/hibernate-core/src/main/antlr/org/hibernate/grammars/hql/HqlParser.g4
@@ -159,8 +159,7 @@ cycleClause
* A toplevel query of subquery, which may be a union or intersection of subqueries
*/
queryExpression
- : withClause? orderedQuery # SimpleQueryGroup
- | withClause? orderedQuery (setOperator orderedQuery)+ # SetQueryGroup
+ : withClause? orderedQuery (setOperator orderedQuery)*
;
/**
@@ -430,8 +429,6 @@ pathContinuation
* * VALUE( path )
* * KEY( path )
* * path[ selector ]
- * * ARRAY_GET( embeddableArrayPath, index ).path
- * * COALESCE( array1, array2 )[ selector ].path
*/
syntacticDomainPath
: treatedNavigablePath
@@ -439,10 +436,6 @@ syntacticDomainPath
| mapKeyNavigablePath
| simplePath indexedPathAccessFragment
| simplePath slicedPathAccessFragment
- | toOneFkReference
- | function pathContinuation
- | function indexedPathAccessFragment pathContinuation?
- | function slicedPathAccessFragment
;
/**
@@ -664,19 +657,21 @@ whereClause
predicate
//highest to lowest precedence
: LEFT_PAREN predicate RIGHT_PAREN # GroupedPredicate
- | expression IS NOT? NULL # IsNullPredicate
- | expression IS NOT? EMPTY # IsEmptyPredicate
- | expression IS NOT? TRUE # IsTruePredicate
- | expression IS NOT? FALSE # IsFalsePredicate
- | expression IS NOT? DISTINCT FROM expression # IsDistinctFromPredicate
+ | expression IS NOT? (NULL|EMPTY|TRUE|FALSE) # UnaryIsPredicate
| expression NOT? MEMBER OF? path # MemberOfPredicate
| expression NOT? IN inList # InPredicate
| expression NOT? BETWEEN expression AND expression # BetweenPredicate
| expression NOT? (LIKE | ILIKE) expression likeEscape? # LikePredicate
- | expression NOT? CONTAINS expression # ContainsPredicate
- | expression NOT? INCLUDES expression # IncludesPredicate
- | expression NOT? INTERSECTS expression # IntersectsPredicate
- | expression comparisonOperator expression # ComparisonPredicate
+ | expression
+ ( NOT? (CONTAINS | INCLUDES | INTERSECTS)
+ | IS NOT? DISTINCT FROM
+ | EQUAL
+ | NOT_EQUAL
+ | GREATER
+ | GREATER_EQUAL
+ | LESS
+ | LESS_EQUAL
+ ) expression # BinaryExpressionPredicate
| EXISTS collectionQuantifier LEFT_PAREN simplePath RIGHT_PAREN # ExistsCollectionPartPredicate
| EXISTS expression # ExistsPredicate
| NOT predicate # NegatedPredicate
@@ -685,18 +680,6 @@ predicate
| expression # BooleanExpressionPredicate
;
-/**
- * An operator which compares values for equality or order
- */
-comparisonOperator
- : EQUAL
- | NOT_EQUAL
- | GREATER
- | GREATER_EQUAL
- | LESS
- | LESS_EQUAL
- ;
-
/**
* Any right operand of the 'in' operator
*
@@ -751,7 +734,14 @@ primaryExpression
| entityVersionReference # EntityVersionExpression
| entityNaturalIdReference # EntityNaturalIdExpression
| syntacticDomainPath pathContinuation? # SyntacticPathExpression
- | function # FunctionExpression
+ // ARRAY_GET( embeddableArrayPath, index ).path
+ // COALESCE( array1, array2 )[ selector ].path
+ // COALESCE( array1, array2 )[ start : end ]
+ | function (
+ pathContinuation
+ | slicedPathAccessFragment
+ | indexedPathAccessFragment pathContinuation?
+ )? # FunctionExpression
| generalPathFragment # GeneralPathExpression
;
@@ -1109,6 +1099,7 @@ function
| collectionFunctionMisuse
| jpaNonstandardFunction
| columnFunction
+ | toOneFkReference
| genericFunction
;
diff --git a/hibernate-core/src/main/java/org/hibernate/annotations/OnDelete.java b/hibernate-core/src/main/java/org/hibernate/annotations/OnDelete.java
index 8a748bdc5216..791200e790a9 100644
--- a/hibernate-core/src/main/java/org/hibernate/annotations/OnDelete.java
+++ b/hibernate-core/src/main/java/org/hibernate/annotations/OnDelete.java
@@ -17,10 +17,33 @@
/**
* Specifies an {@code on delete} action for a foreign key constraint.
* The most common usage is {@code @OnDelete(action = CASCADE)}.
+ *
* Note that this results in an {@code on delete cascade} clause in
* the DDL definition of the foreign key. It's completely different
* to {@link jakarta.persistence.CascadeType#REMOVE}.
*
+ * In fact, {@code @OnDelete} may be combined with {@code cascade=REMOVE}.
+ *
+ * If {@code @OnDelete(action = CASCADE)} is used in conjunction
+ * with {@code cascade=REMOVE}, then associated entities are fetched
+ * from the database, marked deleted in the persistence context,
+ * and evicted from the second-level cache.
+ *
+ * If {@code @OnDelete(action = CASCADE)} is used on its own,
+ * without {@code cascade=REMOVE}, then associated
+ * entities are not fetched from the database, are not marked
+ * deleted in the persistence context, and are not automatically
+ * evicted from the second-level cache.
+ *
+ *
* Like database triggers, {@code on delete} actions can cause state
* held in memory to lose synchronization with the database.
*
diff --git a/hibernate-core/src/main/java/org/hibernate/binder/internal/CommentBinder.java b/hibernate-core/src/main/java/org/hibernate/binder/internal/CommentBinder.java
index bafe20609a14..148962b76215 100644
--- a/hibernate-core/src/main/java/org/hibernate/binder/internal/CommentBinder.java
+++ b/hibernate-core/src/main/java/org/hibernate/binder/internal/CommentBinder.java
@@ -38,26 +38,30 @@ public void bind(Comment comment, MetadataBuildingContext context, PersistentCla
}
else if ( value instanceof Collection ) {
Collection collection = (Collection) value;
- Table table = collection.getTable();
+ Table table = collection.getCollectionTable();
// by default, the comment goes on the table
if ( on.isEmpty() || table.getName().equalsIgnoreCase( on ) ) {
table.setComment( text );
}
- // but if 'on' is explicit, it can go on a column
- Value element = collection.getElement();
- for ( Column column : element.getColumns() ) {
- if ( column.getName().equalsIgnoreCase( on ) ) {
- column.setComment( text );
+ else {
+ // but if 'on' is explicit, it can go on a column
+ for ( Column column : table.getColumns() ) {
+ if ( column.getName().equalsIgnoreCase( on ) ) {
+ column.setComment( text );
+ return;
+ }
}
+ throw new AnnotationException( "No matching column for '@Comment(on=\"" + on + "\")'" );
}
- //TODO: list index / map key columns
}
else {
for ( Column column : value.getColumns() ) {
if ( on.isEmpty() || column.getName().equalsIgnoreCase( on ) ) {
column.setComment( text );
+ return;
}
}
+ throw new AnnotationException( "No matching column for '@Comment(on=\"" + on + "\")'" );
}
}
@@ -70,12 +74,16 @@ public void bind(Comment comment, MetadataBuildingContext context, PersistentCla
if ( on.isEmpty() || primary.getName().equalsIgnoreCase( on ) ) {
primary.setComment( text );
}
- // but if 'on' is explicit, it can go on a secondary table
- for ( Join join : entity.getJoins() ) {
- Table secondary = join.getTable();
- if ( secondary.getName().equalsIgnoreCase( on ) ) {
- secondary.setComment( text );
+ else {
+ // but if 'on' is explicit, it can go on a secondary table
+ for ( Join join : entity.getJoins() ) {
+ Table secondary = join.getTable();
+ if ( secondary.getName().equalsIgnoreCase( on ) ) {
+ secondary.setComment( text );
+ return;
+ }
}
+ throw new AnnotationException( "No matching column for '@Comment(on=\"" + on + "\")'" );
}
}
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/BeanValidationEventListener.java b/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/BeanValidationEventListener.java
index b579c91b16c5..574e560b0f1d 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/BeanValidationEventListener.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/BeanValidationEventListener.java
@@ -14,6 +14,8 @@
import org.hibernate.boot.internal.ClassLoaderAccessImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.engine.spi.SessionFactoryImplementor;
+import org.hibernate.event.spi.PreCollectionUpdateEvent;
+import org.hibernate.event.spi.PreCollectionUpdateEventListener;
import org.hibernate.event.spi.PreDeleteEvent;
import org.hibernate.event.spi.PreDeleteEventListener;
import org.hibernate.event.spi.PreInsertEvent;
@@ -36,6 +38,8 @@
import jakarta.validation.Validator;
import jakarta.validation.ValidatorFactory;
+import static org.hibernate.internal.util.NullnessUtil.castNonNull;
+
/**
* Event listener used to enable Bean Validation for insert/update/delete events.
*
@@ -44,7 +48,7 @@
*/
//FIXME review exception model
public class BeanValidationEventListener
- implements PreInsertEventListener, PreUpdateEventListener, PreDeleteEventListener, PreUpsertEventListener {
+ implements PreInsertEventListener, PreUpdateEventListener, PreDeleteEventListener, PreUpsertEventListener, PreCollectionUpdateEventListener {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
CoreMessageLogger.class,
@@ -121,6 +125,17 @@ public boolean onPreUpsert(PreUpsertEvent event) {
return false;
}
+ @Override
+ public void onPreUpdateCollection(PreCollectionUpdateEvent event) {
+ final Object entity = castNonNull( event.getCollection().getOwner() );
+ validate(
+ entity,
+ event.getSession().getEntityPersister( event.getAffectedOwnerEntityName(), entity ),
+ event.getFactory(),
+ GroupsPerOperation.Operation.UPDATE
+ );
+ }
+
private <T> void validate(
T object,
EntityPersister persister,
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/HibernateTraversableResolver.java b/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/HibernateTraversableResolver.java
index 1058a19823e5..c0afa9d0dc1d 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/HibernateTraversableResolver.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/HibernateTraversableResolver.java
@@ -15,8 +15,11 @@
import org.hibernate.Hibernate;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.persister.entity.EntityPersister;
+import org.hibernate.type.AnyType;
import org.hibernate.type.CollectionType;
+import org.hibernate.type.ComponentType;
import org.hibernate.type.CompositeType;
+import org.hibernate.type.EntityType;
import org.hibernate.type.Type;
import jakarta.validation.Path;
@@ -54,17 +57,17 @@ private void addAssociationsToTheSetForAllProperties(String[] names, Type[] type
private void addAssociationsToTheSetForOneProperty(String name, Type type, String prefix, SessionFactoryImplementor factory) {
- if ( type.isCollectionType() ) {
+ if ( type instanceof CollectionType ) {
CollectionType collType = (CollectionType) type;
Type assocType = collType.getElementType( factory );
addAssociationsToTheSetForOneProperty(name, assocType, prefix, factory);
}
//ToOne association
- else if ( type.isEntityType() || type.isAnyType() ) {
+ else if ( type instanceof EntityType || type instanceof AnyType ) {
associations.add( prefix + name );
}
- else if ( type.isComponentType() ) {
- CompositeType componentType = (CompositeType) type;
+ else if ( type instanceof ComponentType ) {
+ ComponentType componentType = (ComponentType) type;
addAssociationsToTheSetForAllProperties(
componentType.getPropertyNames(),
componentType.getSubtypes(),
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/TypeSafeActivator.java b/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/TypeSafeActivator.java
index b66b4d623ada..c8086f409e87 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/TypeSafeActivator.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/beanvalidation/TypeSafeActivator.java
@@ -126,6 +126,7 @@ public static void applyCallbackListeners(ValidatorFactory validatorFactory, Act
listenerRegistry.appendListeners( EventType.PRE_UPDATE, listener );
listenerRegistry.appendListeners( EventType.PRE_DELETE, listener );
listenerRegistry.appendListeners( EventType.PRE_UPSERT, listener );
+ listenerRegistry.appendListeners( EventType.PRE_COLLECTION_UPDATE, listener );
listener.initialize( cfgService.getSettings(), classLoaderService );
}
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/internal/NamedHqlQueryDefinitionImpl.java b/hibernate-core/src/main/java/org/hibernate/boot/internal/NamedHqlQueryDefinitionImpl.java
index 284a97cf9699..5bdb30e81522 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/internal/NamedHqlQueryDefinitionImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/internal/NamedHqlQueryDefinitionImpl.java
@@ -70,6 +70,7 @@ public String getHqlString() {
public NamedSqmQueryMemento resolve(SessionFactoryImplementor factory) {
return new NamedHqlQueryMementoImpl(
getRegistrationName(),
+ null,
hqlString,
firstResult,
maxResults,
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/internal/NamedNativeQueryDefinitionImpl.java b/hibernate-core/src/main/java/org/hibernate/boot/internal/NamedNativeQueryDefinitionImpl.java
index 688d8dc5e16e..987b873730b4 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/internal/NamedNativeQueryDefinitionImpl.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/internal/NamedNativeQueryDefinitionImpl.java
@@ -18,6 +18,8 @@
import org.hibernate.query.sql.internal.NamedNativeQueryMementoImpl;
import org.hibernate.query.sql.spi.NamedNativeQueryMemento;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
import static org.hibernate.internal.util.StringHelper.isNotEmpty;
/**
@@ -86,15 +88,16 @@ public String getResultSetMappingClassName() {
@Override
public NamedNativeQueryMemento resolve(SessionFactoryImplementor factory) {
+ Class<?> resultClass = isNotEmpty( resultSetMappingClassName )
+ ? factory.getServiceRegistry().requireService( ClassLoaderService.class ).classForName( resultSetMappingClassName )
+ : null;
return new NamedNativeQueryMementoImpl(
getRegistrationName(),
+ resultClass,
sqlString,
sqlString,
resultSetMappingName,
- isNotEmpty( resultSetMappingClassName )
- ? factory.getServiceRegistry().requireService( ClassLoaderService.class )
- .classForName( resultSetMappingClassName )
- : null,
+ resultClass,
querySpaces,
getCacheable(),
getCacheRegion(),
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/internal/SessionFactoryOptionsBuilder.java b/hibernate-core/src/main/java/org/hibernate/boot/internal/SessionFactoryOptionsBuilder.java
index 85008f7ccd40..b65f8a5282ae 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/internal/SessionFactoryOptionsBuilder.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/internal/SessionFactoryOptionsBuilder.java
@@ -7,6 +7,7 @@
package org.hibernate.boot.internal;
import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
@@ -70,6 +71,7 @@
import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
import org.hibernate.stat.Statistics;
import org.hibernate.type.format.FormatMapper;
+import org.hibernate.type.format.FormatMapperCreationContext;
import org.hibernate.type.format.jackson.JacksonIntegration;
import org.hibernate.type.format.jakartajson.JakartaJsonIntegration;
import org.hibernate.type.format.jaxb.JaxbXmlFormatMapper;
@@ -313,13 +315,23 @@ public SessionFactoryOptionsBuilder(StandardServiceRegistry serviceRegistry, Boo
AvailableSettings.JPA_VALIDATION_FACTORY,
configurationSettings.get( AvailableSettings.JAKARTA_VALIDATION_FACTORY )
);
- this.jsonFormatMapper = determineJsonFormatMapper(
+
+ final var formatMapperCreationContext = new FormatMapperCreationContext() {
+ @Override
+ public BootstrapContext getBootstrapContext() {
+ return context;
+ }
+ };
+ jsonFormatMapper = jsonFormatMapper(
configurationSettings.get( AvailableSettings.JSON_FORMAT_MAPPER ),
- strategySelector
+ strategySelector,
+ formatMapperCreationContext
);
- this.xmlFormatMapper = determineXmlFormatMapper(
+
+ xmlFormatMapper = xmlFormatMapper(
configurationSettings.get( AvailableSettings.XML_FORMAT_MAPPER ),
- strategySelector
+ strategySelector,
+ formatMapperCreationContext
);
this.sessionFactoryName = (String) configurationSettings.get( SESSION_FACTORY_NAME );
@@ -827,7 +839,7 @@ private static Supplier<? extends Interceptor> interceptorSupplier(Class<? extends
}
private PhysicalConnectionHandlingMode interpretConnectionHandlingMode(
- Map configurationSettings,
+ Map<String,Object> configurationSettings,
StandardServiceRegistry serviceRegistry) {
final PhysicalConnectionHandlingMode specifiedHandlingMode = PhysicalConnectionHandlingMode.interpret(
configurationSettings.get( CONNECTION_HANDLING )
@@ -840,36 +852,62 @@ private PhysicalConnectionHandlingMode interpretConnectionHandlingMode(
return serviceRegistry.requireService( TransactionCoordinatorBuilder.class ).getDefaultConnectionHandlingMode();
}
- private static FormatMapper determineJsonFormatMapper(Object setting, StrategySelector strategySelector) {
- return strategySelector.resolveDefaultableStrategy(
- FormatMapper.class,
+ private static FormatMapper jsonFormatMapper(Object setting, StrategySelector selector, FormatMapperCreationContext creationContext) {
+ return formatMapper(
setting,
- (Callable<FormatMapper>) () -> {
- final FormatMapper jsonJacksonFormatMapper = JacksonIntegration.getJsonJacksonFormatMapperOrNull();
- if (jsonJacksonFormatMapper != null) {
- return jsonJacksonFormatMapper;
- }
- else {
- return JakartaJsonIntegration.getJakartaJsonBFormatMapperOrNull();
- }
- }
+ selector,
+ () -> {
+ final FormatMapper jsonJacksonFormatMapper = JacksonIntegration.getJsonJacksonFormatMapperOrNull( creationContext );
+ return jsonJacksonFormatMapper != null
+ ? jsonJacksonFormatMapper
+ : JakartaJsonIntegration.getJakartaJsonBFormatMapperOrNull();
+ },
+ creationContext
);
}
- private static FormatMapper determineXmlFormatMapper(Object setting, StrategySelector strategySelector) {
- return strategySelector.resolveDefaultableStrategy(
- FormatMapper.class,
+ private static FormatMapper xmlFormatMapper(Object setting, StrategySelector selector, FormatMapperCreationContext creationContext) {
+ return formatMapper(
setting,
- (Callable<FormatMapper>) () -> {
- final FormatMapper jacksonFormatMapper = JacksonIntegration.getXMLJacksonFormatMapperOrNull();
- if (jacksonFormatMapper != null) {
- return jacksonFormatMapper;
- }
- return new JaxbXmlFormatMapper();
- }
+ selector,
+ () -> {
+ final FormatMapper jacksonFormatMapper = JacksonIntegration.getXMLJacksonFormatMapperOrNull( creationContext );
+ return jacksonFormatMapper != null
+ ? jacksonFormatMapper
+ : new JaxbXmlFormatMapper();
+ },
+ creationContext
);
}
+ private static FormatMapper formatMapper(Object setting, StrategySelector selector, Callable<FormatMapper> defaultResolver, FormatMapperCreationContext creationContext) {
+ return selector.resolveStrategy( FormatMapper.class, setting, defaultResolver, strategyClass -> {
+ try {
+ final Constructor<? extends FormatMapper> creationContextConstructor =
+ strategyClass.getDeclaredConstructor( FormatMapperCreationContext.class );
+ return creationContextConstructor.newInstance( creationContext );
+ }
+ catch (NoSuchMethodException e) {
+ // Ignore
+ }
+ catch (InvocationTargetException | InstantiationException | IllegalAccessException e) {
+ throw new StrategySelectionException(
+ String.format( "Could not instantiate named strategy class [%s]", strategyClass.getName() ),
+ e
+ );
+ }
+ try {
+ return strategyClass.getDeclaredConstructor().newInstance();
+ }
+ catch (Exception e) {
+ throw new StrategySelectionException(
+ String.format( "Could not instantiate named strategy class [%s]", strategyClass.getName() ),
+ e
+ );
+ }
+ } );
+ }
+
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// SessionFactoryOptionsState
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/TypeContributions.java b/hibernate-core/src/main/java/org/hibernate/boot/model/TypeContributions.java
index 4e12ec503c0d..21752b741bf8 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/model/TypeContributions.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/model/TypeContributions.java
@@ -58,7 +58,7 @@ default void contributeJdbcTypeConstructor(JdbcTypeConstructor typeConstructor)
* type for values of type {@link UserType#returnedClass()}.
*/
default void contributeType(UserType<?> type) {
- contributeType( type, type.returnedClass().getName() );
+ contributeType( type, type.returnedClass().getTypeName() );
}
/**
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AggregateComponentSecondPass.java b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AggregateComponentSecondPass.java
index 8872cfb80a51..d57fa9b82273 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AggregateComponentSecondPass.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AggregateComponentSecondPass.java
@@ -15,6 +15,7 @@
import org.hibernate.MappingException;
import org.hibernate.annotations.Comment;
import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedName;
@@ -109,7 +110,8 @@ public void doSecondPass(Map<String, PersistentClass> persistentClasses) throws
orderColumns( registeredUdt, originalOrder );
}
else {
- addAuxiliaryObjects = false;
+ addAuxiliaryObjects =
+ isAggregateArray() && namespace.locateUserDefinedArrayType( Identifier.toIdentifier( aggregateColumn.getSqlType() ) ) == null;
validateEqual( registeredUdt, udt );
}
}
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotatedColumns.java b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotatedColumns.java
index 96d0e30b2eeb..954a83cc4a8b 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotatedColumns.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotatedColumns.java
@@ -106,7 +106,7 @@ public boolean isSecondary() {
final String explicitTableName = firstColumn.getExplicitTableName();
//note: checkPropertyConsistency() is responsible for ensuring they all have the same table name
return isNotEmpty( explicitTableName )
- && !getPropertyHolder().getTable().getName().equals( explicitTableName );
+ && !getOwnerTable().getName().equals( explicitTableName );
}
/**
@@ -125,10 +125,18 @@ public Table getTable() {
// all the columns have to be mapped to the same table
// even though at the annotation level it looks like
// they could each specify a different table
- return isSecondary() ? getJoin().getTable() : getPropertyHolder().getTable();
+ return isSecondary() ? getJoin().getTable() : getOwnerTable();
}
}
+ private Table getOwnerTable() {
+ PropertyHolder holder = getPropertyHolder();
+ while ( holder instanceof ComponentPropertyHolder ) {
+ holder = ( (ComponentPropertyHolder) holder ).parent;
+ }
+ return holder.getTable();
+ }
+
public void setTable(Table table) {
this.table = table;
}
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotationBinder.java b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotationBinder.java
index c0d55e21caf4..bb246dfc32a5 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotationBinder.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotationBinder.java
@@ -42,7 +42,6 @@
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.internal.CoreMessageLogger;
-import org.hibernate.internal.util.StringHelper;
import org.hibernate.resource.beans.internal.FallbackBeanInstanceProducer;
import org.hibernate.resource.beans.spi.ManagedBeanRegistry;
import org.hibernate.type.descriptor.java.BasicJavaType;
@@ -69,7 +68,6 @@
import static org.hibernate.boot.model.internal.AnnotatedClassType.EMBEDDABLE;
import static org.hibernate.boot.model.internal.AnnotatedClassType.ENTITY;
-import static org.hibernate.boot.model.internal.FilterDefBinder.bindFilterDefs;
import static org.hibernate.boot.model.internal.GeneratorBinder.buildGenerators;
import static org.hibernate.boot.model.internal.GeneratorBinder.buildIdGenerator;
import static org.hibernate.boot.model.internal.InheritanceState.getInheritanceStateOfSuperEntity;
@@ -226,7 +224,7 @@ public static void bindPackage(ClassLoaderService cls, String packageName, Metad
bindGenericGenerators( annotatedPackage, context );
bindQueries( annotatedPackage, context );
- bindFilterDefs( annotatedPackage, context );
+ FilterDefBinder.bindFilterDefs( annotatedPackage, context );
}
private static void handleIdGenerators(XPackage annotatedPackage, MetadataBuildingContext context) {
@@ -371,6 +369,12 @@ private static void bindNamedStoredProcedureQuery(
}
}
+ public static void bindFilterDefs(
+ XClass annotatedClass,
+ MetadataBuildingContext context) throws MappingException {
+ FilterDefBinder.bindFilterDefs( annotatedClass, context );
+ }
+
/**
* Bind an annotated class. A subclass must be bound after its superclass.
*
@@ -388,7 +392,6 @@ public static void bindClass(
bindQueries( annotatedClass, context );
handleImport( annotatedClass, context );
- bindFilterDefs( annotatedClass, context );
bindTypeDescriptorRegistrations( annotatedClass, context );
bindEmbeddableInstantiatorRegistrations( annotatedClass, context );
bindUserTypeRegistrations( annotatedClass, context );
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/ClassPropertyHolder.java b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/ClassPropertyHolder.java
index 995d43a6e763..720e9e76614b 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/ClassPropertyHolder.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/ClassPropertyHolder.java
@@ -224,6 +224,19 @@ public void addProperty(Property prop, XClass declaringClass) {
}
}
+ @Override
+ public void movePropertyToJoin(Property property, Join join, XClass declaringClass) {
+ if ( property.getValue() instanceof Component ) {
+ //TODO handle quote and non quote table comparison
+ final String tableName = property.getValue().getTable().getName();
+ if ( getJoinsPerRealTableName().get( tableName ) == join ) {
+ // Skip moving the property, since it was already added to the join
+ return;
+ }
+ }
+ persistentClass.movePropertyToJoin( property, join );
+ }
+
@Override
public Join addJoin(JoinTable joinTableAnn, boolean noDelayInPkColumnCreation) {
final Join join = entityBinder.addJoin( joinTableAnn, this, noDelayInPkColumnCreation );
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/CollectionBinder.java b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/CollectionBinder.java
index 786e6dc7f875..5432f18e16d5 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/CollectionBinder.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/CollectionBinder.java
@@ -2340,8 +2340,7 @@ private AnnotatedClassType annotatedElementType(
}
else {
//force in case of attribute override
- final boolean attributeOverride = property.isAnnotationPresent( AttributeOverride.class )
- || property.isAnnotationPresent( AttributeOverrides.class );
+ final boolean attributeOverride = mappingDefinedAttributeOverrideOnElement(property);
// todo : force in the case of Convert annotation(s) with embedded paths (beyond key/value prefixes)?
return isEmbedded || attributeOverride
? EMBEDDABLE
@@ -2349,6 +2348,11 @@ private AnnotatedClassType annotatedElementType(
}
}
+ protected boolean mappingDefinedAttributeOverrideOnElement(XProperty property) {
+ return property.isAnnotationPresent( AttributeOverride.class )
+ || property.isAnnotationPresent( AttributeOverrides.class );
+ }
+
static AnnotatedColumns createElementColumnsIfNecessary(
Collection collection,
AnnotatedColumns elementColumns,
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/CollectionPropertyHolder.java b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/CollectionPropertyHolder.java
index ca42acc34155..fba3931e83ae 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/CollectionPropertyHolder.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/CollectionPropertyHolder.java
@@ -286,6 +286,11 @@ public void addProperty(Property prop, XClass declaringClass) {
throw new AssertionFailure( "Cannot add property to a collection" );
}
+ @Override
+ public void movePropertyToJoin(Property prop, Join join, XClass declaringClass) {
+ throw new AssertionFailure( "Cannot add property to a collection" );
+ }
+
@Override
public KeyValue getIdentifier() {
throw new AssertionFailure( "Identifier collection not yet managed" );
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/ComponentPropertyHolder.java b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/ComponentPropertyHolder.java
index a9ae72117918..b94ea059a71b 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/ComponentPropertyHolder.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/ComponentPropertyHolder.java
@@ -266,26 +266,31 @@ public String getEntityName() {
@Override
public void addProperty(Property property, AnnotatedColumns columns, XClass declaringClass) {
- //Ejb3Column.checkPropertyConsistency( ); //already called earlier
+ //AnnotatedColumns.checkPropertyConsistency( ); //already called earlier
// Check table matches between the component and the columns
// if not, change the component table if no properties are set
// if a property is set already the core cannot support that
- if ( columns != null ) {
- final Table table = columns.getTable();
- if ( !table.equals( getTable() ) ) {
- if ( component.getPropertySpan() == 0 ) {
- component.setTable( table );
- }
- else {
- throw new AnnotationException(
- "Embeddable class '" + component.getComponentClassName()
- + "' has properties mapped to two different tables"
- + " (all properties of the embeddable class must map to the same table)"
- );
- }
+ assert columns == null || property.getValue().getTable() == columns.getTable();
+ setTable( property.getValue().getTable() );
+ addProperty( property, declaringClass );
+ }
+
+ private void setTable(Table table) {
+ if ( !table.equals( getTable() ) ) {
+ if ( component.getPropertySpan() == 0 ) {
+ component.setTable( table );
+ }
+ else {
+ throw new AnnotationException(
+ "Embeddable class '" + component.getComponentClassName()
+ + "' has properties mapped to two different tables"
+ + " (all properties of the embeddable class must map to the same table)"
+ );
+ }
+ if ( parent instanceof ComponentPropertyHolder ) {
+ ( (ComponentPropertyHolder) parent ).setTable( table );
}
}
- addProperty( property, declaringClass );
}
@Override
@@ -330,6 +335,16 @@ public void addProperty(Property prop, XClass declaringClass) {
component.addProperty( prop, declaringClass );
}
+ @Override
+ public void movePropertyToJoin(Property prop, Join join, XClass declaringClass) {
+ // or maybe only throw if component.getTable() != join.getTable()
+ throw new AnnotationException(
+ "Embeddable class '" + component.getComponentClassName()
+ + "' has an unowned @OneToOne property " + prop.getName()
+ + "mapped to a join table which is unsupported"
+ );
+ }
+
@Override
public KeyValue getIdentifier() {
return component.getOwner().getIdentifier();
diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/EmbeddableBinder.java b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/EmbeddableBinder.java
index 92243de3333b..f06189c55fd0 100644
--- a/hibernate-core/src/main/java/org/hibernate/boot/model/internal/EmbeddableBinder.java
+++ b/hibernate-core/src/main/java/org/hibernate/boot/model/internal/EmbeddableBinder.java
@@ -11,11 +11,13 @@
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
-import java.util.TreeMap;
import org.hibernate.AnnotationException;
+import org.hibernate.MappingException;
import org.hibernate.annotations.DiscriminatorFormula;
import org.hibernate.annotations.Instantiator;
import org.hibernate.annotations.TypeBinderType;
@@ -28,7 +30,6 @@
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.PropertyData;
import org.hibernate.internal.CoreMessageLogger;
-import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.Property;
@@ -66,19 +67,15 @@
import static org.hibernate.boot.model.internal.BinderHelper.getPropertyOverriddenByMapperOrMapsId;
import static org.hibernate.boot.model.internal.BinderHelper.getRelativePath;
import static org.hibernate.boot.model.internal.BinderHelper.hasToOneAnnotation;
-import static org.hibernate.boot.model.internal.BinderHelper.isGlobalGeneratorNameGlobal;
-import static org.hibernate.boot.model.internal.GeneratorBinder.buildGenerators;
-import static org.hibernate.boot.model.internal.GeneratorBinder.generatorType;
-import static org.hibernate.boot.model.internal.GeneratorBinder.makeIdGenerator;
import static org.hibernate.boot.model.internal.HCANNHelper.findContainingAnnotations;
import static org.hibernate.boot.model.internal.PropertyBinder.addElementsOfClass;
import static org.hibernate.boot.model.internal.PropertyBinder.processElementAnnotations;
+import static org.hibernate.boot.model.internal.PropertyBinder.processId;
import static org.hibernate.boot.model.internal.PropertyHolderBuilder.buildPropertyHolder;
import static org.hibernate.internal.CoreLogging.messageLogger;
import static org.hibernate.internal.util.StringHelper.isEmpty;
import static org.hibernate.internal.util.StringHelper.qualify;
import static org.hibernate.internal.util.StringHelper.unqualify;
-import static org.hibernate.mapping.SimpleValue.DEFAULT_ID_GEN_STRATEGY;
/**
* A binder responsible for interpreting {@link Embeddable} classes and producing
@@ -249,15 +246,15 @@ private static PropertyBinder createEmbeddedProperty(
final PropertyBinder binder = new PropertyBinder();
binder.setDeclaringClass( inferredData.getDeclaringClass() );
binder.setName( inferredData.getPropertyName() );
- binder.setValue(component);
+ binder.setValue( component );
binder.setProperty( inferredData.getProperty() );
binder.setAccessType( inferredData.getDefaultAccess() );
- binder.setEmbedded(isComponentEmbedded);
- binder.setHolder(propertyHolder);
- binder.setId(isId);
- binder.setEntityBinder(entityBinder);
- binder.setInheritanceStatePerClass(inheritanceStatePerClass);
- binder.setBuildingContext(context);
+ binder.setEmbedded( isComponentEmbedded );
+ binder.setHolder( propertyHolder );
+ binder.setId( isId );
+ binder.setEntityBinder( entityBinder );
+ binder.setInheritanceStatePerClass( inheritanceStatePerClass );
+ binder.setBuildingContext( context );
binder.makePropertyAndBind();
return binder;
}
@@ -413,7 +410,7 @@ static Component fillEmbeddable(
final BasicType> discriminatorType = (BasicType>) component.getDiscriminator().getType();
// Discriminator values are used to construct the embeddable domain
// type hierarchy so order of processing is important
- final Map